/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "diagnostic-core.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
#include "rtl-iter.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
                                                   const_rtx, machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
                                             const_rtx, machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx,
                                                machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
                                          machine_mode, unsigned int);

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      /* A previous iteration might also have moved from the stack to the
         heap, in which case the heap array will already be big enough.  */
      if (vec_safe_length (array.heap) <= i)
        vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
         code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
            if (__builtin_expect (end < LOCAL_ELEMS, true))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
          }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
          if (__builtin_expect (end < LOCAL_ELEMS, true))
            base[end++] = subx;
          else
            base = add_single_to_queue (array, base, end++, subx);
        }
      else if (format[i] == 'E')
        {
          unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
          rtx *vec = x->u.fld[i].rt_rtvec->elem;
          if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
            for (unsigned int j = 0; j < length; j++)
              base[end++] = T::get_value (vec[j]);
          else
            for (unsigned int j = 0; j < length; j++)
              base = add_single_to_queue (array, base, end++,
                                          T::get_value (vec[j]));
          if (code == SEQUENCE && end == length)
            /* If the subrtxes of the sequence fill the entire array then
               we know that no other parts of a containing insn are queued.
               The caller is therefore iterating over the sequence as a
               PATTERN (...), so we also want the patterns of the
               subinstructions.  */
            for (unsigned int j = 0; j < length; j++)
              {
                typename T::rtx_type x = T::get_rtx (base[j]);
                if (INSN_P (x))
                  base[j] = T::get_value (PATTERN (x));
              }
        }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;
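
/* A minimal usage sketch (not part of the original file): the iterator
   classes instantiated above back the FOR_EACH_SUBRTX family of macros
   from rtl-iter.h.  A typical walk over every subrtx of X looks like
   the hypothetical helper below, which counts the REGs contained in X.  */

static int
example_count_regs (const_rtx x)
{
  int count = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (REG_P (*iter))
      count++;
  return count;
}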

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
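
/* A minimal usage sketch (not part of the original file): a CSE-like
   pass could use rtx_unstable_p to decide whether two occurrences of
   the same MEM address must denote the same location.  The helper
   name is hypothetical.  */

static bool
example_mem_address_is_stable (const_rtx mem)
{
  /* The address is dependable across program points only if no part
     of it is unstable.  */
  return MEM_P (mem) && !rtx_unstable_p (XEXP (mem, 0));
}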

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Compute an approximation for the offset between the register
   FROM and TO for the current function, as it was at the start
   of the routine.  */

static HOST_WIDE_INT
get_initial_register_offset (int from, int to)
{
#ifdef ELIMINABLE_REGS
  static const struct elim_table_t
  {
    const int from;
    const int to;
  } table[] = ELIMINABLE_REGS;
  HOST_WIDE_INT offset1, offset2;
  unsigned int i, j;

  if (to == from)
    return 0;

  /* It is not safe to call INITIAL_ELIMINATION_OFFSET
     before the reload pass.  We need to give at least
     an estimation for the resulting frame size.  */
  if (! reload_completed)
    {
      offset1 = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
      offset1 = - offset1;
#endif
      if (to == STACK_POINTER_REGNUM)
        return offset1;
      else if (from == STACK_POINTER_REGNUM)
        return - offset1;
      else
        return 0;
    }

  for (i = 0; i < ARRAY_SIZE (table); i++)
    if (table[i].from == from)
      {
        if (table[i].to == to)
          {
            INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                        offset1);
            return offset1;
          }
        for (j = 0; j < ARRAY_SIZE (table); j++)
          {
            if (table[j].to == to
                && table[j].from == table[i].to)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return offset1 + offset2;
              }
            if (table[j].from == to
                && table[j].to == table[i].to)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return offset1 - offset2;
              }
          }
      }
    else if (table[i].to == from)
      {
        if (table[i].from == to)
          {
            INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                        offset1);
            return - offset1;
          }
        for (j = 0; j < ARRAY_SIZE (table); j++)
          {
            if (table[j].to == to
                && table[j].from == table[i].from)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return - offset1 + offset2;
              }
            if (table[j].from == to
                && table[j].to == table[i].from)
              {
                INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
                                            offset1);
                INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
                                            offset2);
                return - offset1 - offset2;
              }
          }
      }

  /* If the requested register combination was not found,
     try a different more simple combination.  */
  if (from == ARG_POINTER_REGNUM)
    return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
  else if (to == ARG_POINTER_REGNUM)
    return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
  else if (from == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
  else if (to == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
  else
    return 0;

#else
  HOST_WIDE_INT offset;

  if (to == from)
    return 0;

  if (reload_completed)
    {
      INITIAL_FRAME_POINTER_OFFSET (offset);
    }
  else
    {
      offset = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
      offset = - offset;
#endif
    }

  if (to == STACK_POINTER_REGNUM)
    return offset;
  else if (from == STACK_POINTER_REGNUM)
    return - offset;
  else
    return 0;

#endif
}

/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        {
#ifdef RED_ZONE_SIZE
          HOST_WIDE_INT red_zone_size = RED_ZONE_SIZE;
#else
          HOST_WIDE_INT red_zone_size = 0;
#endif
          HOST_WIDE_INT stack_boundary = PREFERRED_STACK_BOUNDARY
                                         / BITS_PER_UNIT;
          HOST_WIDE_INT low_bound, high_bound;

          if (size == 0)
            size = GET_MODE_SIZE (mode);

          if (x == frame_pointer_rtx)
            {
              if (FRAME_GROWS_DOWNWARD)
                {
                  high_bound = STARTING_FRAME_OFFSET;
                  low_bound = high_bound - get_frame_size ();
                }
              else
                {
                  low_bound = STARTING_FRAME_OFFSET;
                  high_bound = low_bound + get_frame_size ();
                }
            }
          else if (x == hard_frame_pointer_rtx)
            {
              HOST_WIDE_INT sp_offset
                = get_initial_register_offset (STACK_POINTER_REGNUM,
                                               HARD_FRAME_POINTER_REGNUM);
              HOST_WIDE_INT ap_offset
                = get_initial_register_offset (ARG_POINTER_REGNUM,
                                               HARD_FRAME_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
              low_bound = sp_offset - red_zone_size - stack_boundary;
              high_bound = ap_offset
                           + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
                           + crtl->args.size
#endif
                           + stack_boundary;
#else
              high_bound = sp_offset + red_zone_size + stack_boundary;
              low_bound = ap_offset
                          + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
                          - crtl->args.size
#endif
                          - stack_boundary;
#endif
            }
          else if (x == stack_pointer_rtx)
            {
              HOST_WIDE_INT ap_offset
                = get_initial_register_offset (ARG_POINTER_REGNUM,
                                               STACK_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
              low_bound = - red_zone_size - stack_boundary;
              high_bound = ap_offset
                           + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
                           + crtl->args.size
#endif
                           + stack_boundary;
#else
              high_bound = red_zone_size + stack_boundary;
              low_bound = ap_offset
                          + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
                          - crtl->args.size
#endif
                          - stack_boundary;
#endif
            }
          else
            {
              /* We assume that accesses are safe to at least the
                 next stack boundary.
                 Examples are varargs and __builtin_return_address.  */
#if ARGS_GROW_DOWNWARD
              high_bound = FIRST_PARM_OFFSET (current_function_decl)
                           + stack_boundary;
              low_bound = FIRST_PARM_OFFSET (current_function_decl)
                          - crtl->args.size - stack_boundary;
#else
              low_bound = FIRST_PARM_OFFSET (current_function_decl)
                          - stack_boundary;
              high_bound = FIRST_PARM_OFFSET (current_function_decl)
                           + crtl->args.size + stack_boundary;
#endif
            }

          if (offset >= low_bound && offset <= high_bound - size)
            return 0;
          return 1;
        }
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
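
/* A minimal usage sketch (not part of the original file): a pass that
   wants to hoist a load out of a conditional must first check that
   dereferencing it cannot fault.  The helper name is hypothetical.  */

static bool
example_safe_to_speculate_load_p (const_rtx mem)
{
  /* Volatile accesses have side effects beyond faulting, so reject
     them as well.  */
  return MEM_P (mem)
         && !MEM_VOLATILE_P (mem)
         && !rtx_addr_can_trap_p (XEXP (mem, 0));
}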

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
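
/* A minimal usage sketch (not part of the original file): knowing an
   address is nonzero lets a simplifier fold comparisons of that
   address against zero.  The helper name is hypothetical.  */

static rtx
example_fold_address_eq_zero (rtx addr)
{
  /* (eq addr (const_int 0)) is known false when ADDR cannot be zero;
     return NULL_RTX when nothing is known.  */
  return nonzero_address_p (addr) ? const0_rtx : NULL_RTX;
}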

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
\f
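/* A minimal usage sketch (not part of the original file): digging the
   call target out of a CALL_INSN.  The helper name is hypothetical.  */

static rtx
example_call_target (rtx_insn *insn)
{
  rtx call = get_call_rtx_from (insn);
  if (call == NULL_RTX)
    return NULL_RTX;
  /* Operand 0 of a CALL is a MEM whose address is the target,
     typically a SYMBOL_REF for direct calls.  */
  return XEXP (XEXP (call, 0), 0);
}
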
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
\f
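/* A minimal usage sketch (not part of the original file): together the
   two routines above decompose a CONST such as
     (const (plus (symbol_ref "s") (const_int 4)))
   into the base (symbol_ref "s") and the term 4.  The helper name is
   hypothetical.  */

static bool
example_split_symbolic_const (const_rtx x, rtx *base, HOST_WIDE_INT *term)
{
  *base = get_related_value (x);
  if (*base == NULL_RTX)
    return false;
  *term = get_integer_term (x);
  return true;
}
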
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
\f
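/* A minimal usage sketch (not part of the original file): because
   split_const always succeeds (falling back to a zero offset), it is
   convenient for comparing symbolic addresses.  The helper, which
   assumes both addresses share a base, is hypothetical.  */

static HOST_WIDE_INT
example_offset_difference (rtx x, rtx y)
{
  rtx base_x, off_x, base_y, off_y;
  split_const (x, &base_x, &off_x);
  split_const (y, &base_y, &off_y);
  gcc_assert (rtx_equal_p (base_x, base_y));
  return INTVAL (off_x) - INTVAL (off_y);
}
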
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
\f
/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}

\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == LABEL_REF_LABEL (in);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
\f
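/* A minimal usage sketch (not part of the original file): asking
   whether an insn's pattern mentions a given register anywhere.  The
   helper name is hypothetical.  */

static bool
example_insn_mentions_reg_p (const rtx_insn *insn, const_rtx reg)
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}
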
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
        if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
          return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  return set_of (reg, insn) != NULL_RTX;
}
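
/* A minimal usage sketch (not part of the original file): a forward
   propagation pass might require that a register keep its value from
   its definition to a candidate use.  The helper name is
   hypothetical; both endpoints are excluded from the scan.  */

static bool
example_value_survives_p (rtx reg, const rtx_insn *def, const rtx_insn *use)
{
  return !reg_set_between_p (reg, def, use);
}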

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    {
      if (implicit)
        IOR_HARD_REG_SET (*pset, call_used_reg_set);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    }
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}
\f
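/* A minimal usage sketch (not part of the original file): after
   reload, a pass can compute everything an insn writes and then test
   one hard register.  The helper name is hypothetical.  */

static bool
example_insn_clobbers_hard_reg_p (const rtx_insn *insn, unsigned int regno)
{
  HARD_REG_SET written;
  /* Passing true includes the registers implicitly clobbered when
     INSN is a call.  */
  find_all_hard_reg_sets (insn, &written, true);
  return TEST_HARD_REG_BIT (written, regno);
}
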
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set for the first time, we just expect
                 it to be the single set we are looking for; only when
                 more sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
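
/* A minimal usage sketch (not part of the original file): most callers
   reach this through the single_set wrapper from rtl.h.  The
   hypothetical helper below rewrites the source of a single-set insn,
   letting validate_change (from recog.h) verify that the result still
   matches a recognizable pattern.  */

static bool
example_replace_single_set_src (rtx_insn *insn, rtx new_src)
{
  rtx set = single_set (insn);
  if (set == NULL_RTX)
    return false;
  return validate_change (insn, &SET_SRC (set), new_src, false);
}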

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const rtx_insn *insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f
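/* A minimal usage sketch (not part of the original file): a cleanup
   pass can mark no-op moves as deleted.  The helper name is
   hypothetical.  */

static void
example_mark_noop_moves (rtx_insn *first)
{
  for (rtx_insn *insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      SET_INSN_DELETED (insn);
}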

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

bool
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return false;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
           || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
               && x_regno == ARG_POINTER_REGNUM)
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return true;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return true;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return false;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return true;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return true;
        }
    }
  return false;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X cannot
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
\f
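/* A minimal usage sketch (not part of the original file): unlike
   reg_mentioned_p, reg_overlap_mentioned_p also catches partial
   overlaps between multi-register hard regs.  The hypothetical helper
   below rejects hoisting a SET across an insn that reads or writes any
   part of its destination.  */

static bool
example_dest_conflicts_p (rtx set, const rtx_insn *other)
{
  return reg_overlap_mentioned_p (SET_DEST (set), PATTERN (other));
}
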
2c88418c 1821/* Call FUN on each register or MEM that is stored into or clobbered by X.
c3a1ef9d
MM
1822 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1823 ignored by note_stores, but passed to FUN.
1824
1825 FUN receives three arguments:
1826 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1827 2. the SET or CLOBBER rtx that does the store,
1828 3. the pointer DATA provided to note_stores.
2c88418c
RS
1829
1830 If the item being stored in or clobbered is a SUBREG of a hard register,
1831 the SUBREG will be passed. */
a6a2274a 1832
2c88418c 1833void
7bc980e1 1834note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
2c88418c 1835{
aa317c97 1836 int i;
90d036a0 1837
aa317c97
KG
1838 if (GET_CODE (x) == COND_EXEC)
1839 x = COND_EXEC_CODE (x);
90d036a0 1840
aa317c97
KG
1841 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1842 {
1843 rtx dest = SET_DEST (x);
1844
1845 while ((GET_CODE (dest) == SUBREG
1846 && (!REG_P (SUBREG_REG (dest))
1847 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1848 || GET_CODE (dest) == ZERO_EXTRACT
1849 || GET_CODE (dest) == STRICT_LOW_PART)
1850 dest = XEXP (dest, 0);
1851
1852 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1853 each of whose first operand is a register. */
1854 if (GET_CODE (dest) == PARALLEL)
1855 {
1856 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1857 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1858 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1859 }
1860 else
1861 (*fun) (dest, x, data);
1862 }
770ae6cc 1863
aa317c97
KG
1864 else if (GET_CODE (x) == PARALLEL)
1865 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1866 note_stores (XVECEXP (x, 0, i), fun, data);
1867}
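/* Illustrative sketch, not part of the original file: a minimal
   note_stores callback that counts stores into hard registers.  The
   names count_hard_reg_stores_1 and count_hard_reg_stores are
   hypothetical and assume the surrounding file context.  */

static void
count_hard_reg_stores_1 (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
			 void *data)
{
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    ++*(int *) data;
}

static int
count_hard_reg_stores (rtx pat)
{
  int count = 0;
  note_stores (pat, count_hard_reg_stores_1, &count);
  return count;
}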
2c88418c 1868\f
e2373f95
RK
1869/* Like note_stores, but call FUN for each expression that is being
1870 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1871 FUN for each expression, not any interior subexpressions. FUN receives a
1872 pointer to the expression and the DATA passed to this function.
1873
1874 Note that this is not quite the same test as that done in reg_referenced_p
1875 since that considers something as being referenced if it is being
1876 partially set, while we do not. */
1877
1878void
0c20a65f 1879note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
e2373f95
RK
1880{
1881 rtx body = *pbody;
1882 int i;
1883
1884 switch (GET_CODE (body))
1885 {
1886 case COND_EXEC:
1887 (*fun) (&COND_EXEC_TEST (body), data);
1888 note_uses (&COND_EXEC_CODE (body), fun, data);
1889 return;
1890
1891 case PARALLEL:
1892 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1893 note_uses (&XVECEXP (body, 0, i), fun, data);
1894 return;
1895
bbbc206e
BS
1896 case SEQUENCE:
1897 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1898 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1899 return;
1900
e2373f95
RK
1901 case USE:
1902 (*fun) (&XEXP (body, 0), data);
1903 return;
1904
1905 case ASM_OPERANDS:
1906 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1907 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1908 return;
1909
1910 case TRAP_IF:
1911 (*fun) (&TRAP_CONDITION (body), data);
1912 return;
1913
21b8482a
JJ
1914 case PREFETCH:
1915 (*fun) (&XEXP (body, 0), data);
1916 return;
1917
e2373f95
RK
1918 case UNSPEC:
1919 case UNSPEC_VOLATILE:
1920 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1921 (*fun) (&XVECEXP (body, 0, i), data);
1922 return;
1923
1924 case CLOBBER:
3c0cb5de 1925 if (MEM_P (XEXP (body, 0)))
e2373f95
RK
1926 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1927 return;
1928
1929 case SET:
1930 {
1931 rtx dest = SET_DEST (body);
1932
1933	/* For sets we replace everything in the source, plus registers in the
1934	   memory expression of the store and the operands of a ZERO_EXTRACT.  */
1935 (*fun) (&SET_SRC (body), data);
1936
1937 if (GET_CODE (dest) == ZERO_EXTRACT)
1938 {
1939 (*fun) (&XEXP (dest, 1), data);
1940 (*fun) (&XEXP (dest, 2), data);
1941 }
1942
1943 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1944 dest = XEXP (dest, 0);
1945
3c0cb5de 1946 if (MEM_P (dest))
e2373f95
RK
1947 (*fun) (&XEXP (dest, 0), data);
1948 }
1949 return;
1950
1951 default:
1952 /* All the other possibilities never store. */
1953 (*fun) (pbody, data);
1954 return;
1955 }
1956}
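/* Illustrative sketch, not part of the original file: a note_uses
   callback that records whether any top-level used expression is a
   MEM.  Since note_uses does not visit interior subexpressions, a MEM
   buried inside an arithmetic expression would not be seen.  The names
   find_mem_use_1 and pattern_uses_mem_p are hypothetical.  */

static void
find_mem_use_1 (rtx *loc, void *data)
{
  if (MEM_P (*loc))
    *(bool *) data = true;
}

static bool
pattern_uses_mem_p (rtx pat)
{
  bool found = false;
  note_uses (&pat, find_mem_use_1, &found);
  return found;
}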
1957\f
2c88418c
RS
1958/* Return nonzero if X's old contents don't survive after INSN.
1959   This will be true if X is (cc0), or if X is a register and
1960   either X dies in INSN or INSN entirely sets X.
1961
46d096a3
SB
1962 "Entirely set" means set directly and not through a SUBREG, or
1963 ZERO_EXTRACT, so no trace of the old contents remains.
2c88418c
RS
1964 Likewise, REG_INC does not count.
1965
1966 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1967 but for this use that makes no difference, since regs don't overlap
1968 during their lifetimes. Therefore, this function may be used
6fb5fa3c 1969 at any time after deaths have been computed.
2c88418c
RS
1970
1971 If REG is a hard reg that occupies multiple machine registers, this
1972 function will only return 1 if each of those registers will be replaced
1973 by INSN. */
1974
1975int
f7d504c2 1976dead_or_set_p (const_rtx insn, const_rtx x)
2c88418c 1977{
09e18274 1978 unsigned int regno, end_regno;
770ae6cc 1979 unsigned int i;
2c88418c
RS
1980
1981 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1982 if (GET_CODE (x) == CC0)
1983 return 1;
1984
41374e13 1985 gcc_assert (REG_P (x));
2c88418c
RS
1986
1987 regno = REGNO (x);
09e18274
RS
1988 end_regno = END_REGNO (x);
1989 for (i = regno; i < end_regno; i++)
2c88418c
RS
1990 if (! dead_or_set_regno_p (insn, i))
1991 return 0;
1992
1993 return 1;
1994}
1995
194acded
HPN
1996/* Return TRUE iff DEST is a register, or a subreg of a register that
1997   doesn't change the number of words of the inner register, and some
1998   part of the register is TEST_REGNO.  */
1999
2000static bool
f7d504c2 2001covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
194acded
HPN
2002{
2003 unsigned int regno, endregno;
2004
2005 if (GET_CODE (dest) == SUBREG
2006 && (((GET_MODE_SIZE (GET_MODE (dest))
2007 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2008 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2009 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
2010 dest = SUBREG_REG (dest);
2011
2012 if (!REG_P (dest))
2013 return false;
2014
2015 regno = REGNO (dest);
09e18274 2016 endregno = END_REGNO (dest);
194acded
HPN
2017 return (test_regno >= regno && test_regno < endregno);
2018}
2019
2020/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
2021 any member matches the covers_regno_no_parallel_p criteria. */
2022
2023static bool
f7d504c2 2024covers_regno_p (const_rtx dest, unsigned int test_regno)
194acded
HPN
2025{
2026 if (GET_CODE (dest) == PARALLEL)
2027 {
2028 /* Some targets place small structures in registers for return
2029 values of functions, and those registers are wrapped in
2030 PARALLELs that we may see as the destination of a SET. */
2031 int i;
2032
2033 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
2034 {
2035 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
2036 if (inner != NULL_RTX
2037 && covers_regno_no_parallel_p (inner, test_regno))
2038 return true;
2039 }
2040
2041 return false;
2042 }
2043 else
2044 return covers_regno_no_parallel_p (dest, test_regno);
2045}
2046
6fb5fa3c 2047/* Utility function for dead_or_set_p to check an individual register. */
2c88418c
RS
2048
2049int
f7d504c2 2050dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
2c88418c 2051{
f7d504c2 2052 const_rtx pattern;
2c88418c 2053
0a2287bf
RH
2054 /* See if there is a death note for something that includes TEST_REGNO. */
2055 if (find_regno_note (insn, REG_DEAD, test_regno))
2056 return 1;
2c88418c 2057
4b4bf941 2058 if (CALL_P (insn)
8f3e7a26
RK
2059 && find_regno_fusage (insn, CLOBBER, test_regno))
2060 return 1;
2061
0c99ec5c
RH
2062 pattern = PATTERN (insn);
2063
10439b59 2064 /* If a COND_EXEC is not executed, the value survives. */
0c99ec5c 2065 if (GET_CODE (pattern) == COND_EXEC)
10439b59 2066 return 0;
0c99ec5c
RH
2067
2068 if (GET_CODE (pattern) == SET)
194acded 2069 return covers_regno_p (SET_DEST (pattern), test_regno);
0c99ec5c 2070 else if (GET_CODE (pattern) == PARALLEL)
2c88418c 2071 {
b3694847 2072 int i;
2c88418c 2073
0c99ec5c 2074 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
2c88418c 2075 {
0c99ec5c
RH
2076 rtx body = XVECEXP (pattern, 0, i);
2077
2078 if (GET_CODE (body) == COND_EXEC)
2079 body = COND_EXEC_CODE (body);
2c88418c 2080
194acded
HPN
2081 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
2082 && covers_regno_p (SET_DEST (body), test_regno))
2083 return 1;
2c88418c
RS
2084 }
2085 }
2086
2087 return 0;
2088}
2089
2090/* Return the reg-note of kind KIND in insn INSN, if there is one.
2091 If DATUM is nonzero, look for one whose datum is DATUM. */
2092
2093rtx
f7d504c2 2094find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
2c88418c 2095{
b3694847 2096 rtx link;
2c88418c 2097
7a40b8b1 2098 gcc_checking_assert (insn);
af082de3 2099
ae78d276 2100 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 2101 if (! INSN_P (insn))
ae78d276 2102 return 0;
cd798543
AP
2103 if (datum == 0)
2104 {
2105 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2106 if (REG_NOTE_KIND (link) == kind)
2107 return link;
2108 return 0;
2109 }
ae78d276 2110
2c88418c 2111 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cd798543 2112 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2c88418c
RS
2113 return link;
2114 return 0;
2115}
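/* Illustrative sketch, not part of the original file: find_reg_note
   returns the note rtx itself, so a null check doubles as a
   predicate.  The name insn_has_unused_dest_p is hypothetical.  */

static bool
insn_has_unused_dest_p (const_rtx insn, const_rtx reg)
{
  return find_reg_note (insn, REG_UNUSED, reg) != NULL_RTX;
}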
2116
2117/* Return the reg-note of kind KIND in insn INSN which applies to register
99309f3b
RK
2118 number REGNO, if any. Return 0 if there is no such reg-note. Note that
2119 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
2120 it might be the case that the note overlaps REGNO. */
2c88418c
RS
2121
2122rtx
f7d504c2 2123find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
2c88418c 2124{
b3694847 2125 rtx link;
2c88418c 2126
ae78d276 2127 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 2128 if (! INSN_P (insn))
ae78d276
MM
2129 return 0;
2130
2c88418c
RS
2131 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2132 if (REG_NOTE_KIND (link) == kind
2133 /* Verify that it is a register, so that scratch and MEM won't cause a
2134 problem here. */
f8cfc6aa 2135 && REG_P (XEXP (link, 0))
99309f3b 2136 && REGNO (XEXP (link, 0)) <= regno
09e18274 2137 && END_REGNO (XEXP (link, 0)) > regno)
2c88418c
RS
2138 return link;
2139 return 0;
2140}
8f3e7a26 2141
d9c695ff
RK
2142/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2143 has such a note. */
2144
2145rtx
f7d504c2 2146find_reg_equal_equiv_note (const_rtx insn)
d9c695ff 2147{
cd648cec 2148 rtx link;
d9c695ff 2149
cd648cec 2150 if (!INSN_P (insn))
d9c695ff 2151 return 0;
ea8f106d 2152
cd648cec
JH
2153 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2154 if (REG_NOTE_KIND (link) == REG_EQUAL
2155 || REG_NOTE_KIND (link) == REG_EQUIV)
2156 {
ea8f106d
SB
2157 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
2158 insns that have multiple sets. Checking single_set to
2159 make sure of this is not the proper check, as explained
2160 in the comment in set_unique_reg_note.
2161
2162 This should be changed into an assert. */
2163 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
cd648cec
JH
2164 return 0;
2165 return link;
2166 }
2167 return NULL;
d9c695ff
RK
2168}
2169
2a450639
RS
2170/* Check whether INSN is a single_set whose source is known to be
2171 equivalent to a constant. Return that constant if so, otherwise
2172 return null. */
2173
2174rtx
68a1a6c0 2175find_constant_src (const rtx_insn *insn)
2a450639
RS
2176{
2177 rtx note, set, x;
2178
2179 set = single_set (insn);
2180 if (set)
2181 {
2182 x = avoid_constant_pool_reference (SET_SRC (set));
2183 if (CONSTANT_P (x))
2184 return x;
2185 }
2186
2187 note = find_reg_equal_equiv_note (insn);
2188 if (note && CONSTANT_P (XEXP (note, 0)))
2189 return XEXP (note, 0);
2190
2191 return NULL_RTX;
2192}
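/* Illustrative sketch, not part of the original file: a helper built
   on find_constant_src that tests whether INSN is known to load a
   compile-time integer constant.  The name insn_loads_const_int_p is
   hypothetical.  */

static bool
insn_loads_const_int_p (const rtx_insn *insn)
{
  rtx cst = find_constant_src (insn);
  return cst != NULL_RTX && CONST_INT_P (cst);
}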
2193
8f3e7a26
RK
2194/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2195 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2196
2197int
f7d504c2 2198find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
8f3e7a26
RK
2199{
2200 /* If it's not a CALL_INSN, it can't possibly have a
2201 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
4b4bf941 2202 if (!CALL_P (insn))
8f3e7a26
RK
2203 return 0;
2204
41374e13 2205 gcc_assert (datum);
8f3e7a26 2206
f8cfc6aa 2207 if (!REG_P (datum))
8f3e7a26 2208 {
b3694847 2209 rtx link;
8f3e7a26
RK
2210
2211 for (link = CALL_INSN_FUNCTION_USAGE (insn);
a6a2274a 2212 link;
8f3e7a26 2213 link = XEXP (link, 1))
a6a2274a 2214 if (GET_CODE (XEXP (link, 0)) == code
cc863bea 2215 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
a6a2274a 2216 return 1;
8f3e7a26
RK
2217 }
2218 else
2219 {
770ae6cc 2220 unsigned int regno = REGNO (datum);
8f3e7a26
RK
2221
2222 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2223 to pseudo registers, so don't bother checking. */
2224
2225 if (regno < FIRST_PSEUDO_REGISTER)
a6a2274a 2226 {
72d19505 2227 unsigned int end_regno = END_REGNO (datum);
770ae6cc 2228 unsigned int i;
8f3e7a26
RK
2229
2230 for (i = regno; i < end_regno; i++)
2231 if (find_regno_fusage (insn, code, i))
2232 return 1;
a6a2274a 2233 }
8f3e7a26
RK
2234 }
2235
2236 return 0;
2237}
2238
2239/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2240 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2241
2242int
f7d504c2 2243find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
8f3e7a26 2244{
b3694847 2245 rtx link;
8f3e7a26
RK
2246
2247 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2248 to pseudo registers, so don't bother checking. */
2249
2250 if (regno >= FIRST_PSEUDO_REGISTER
4b4bf941 2251 || !CALL_P (insn) )
8f3e7a26
RK
2252 return 0;
2253
2254 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
83ab3839 2255 {
770ae6cc 2256 rtx op, reg;
83ab3839
RH
2257
2258 if (GET_CODE (op = XEXP (link, 0)) == code
f8cfc6aa 2259 && REG_P (reg = XEXP (op, 0))
09e18274 2260 && REGNO (reg) <= regno
72d19505 2261 && END_REGNO (reg) > regno)
83ab3839
RH
2262 return 1;
2263 }
8f3e7a26
RK
2264
2265 return 0;
2266}
a6a063b8 2267
2c88418c 2268\f
e5af9ddd
RS
2269/* Return true if KIND is an integer REG_NOTE. */
2270
2271static bool
2272int_reg_note_p (enum reg_note kind)
2273{
2274 return kind == REG_BR_PROB;
2275}
2276
efc0b2bd
ILT
2277/* Allocate a register note with kind KIND and datum DATUM. LIST is
2278 stored as the pointer to the next register note. */
65c5f2a6 2279
efc0b2bd
ILT
2280rtx
2281alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
65c5f2a6
ILT
2282{
2283 rtx note;
2284
e5af9ddd 2285 gcc_checking_assert (!int_reg_note_p (kind));
65c5f2a6
ILT
2286 switch (kind)
2287 {
2288 case REG_CC_SETTER:
2289 case REG_CC_USER:
2290 case REG_LABEL_TARGET:
2291 case REG_LABEL_OPERAND:
0a35513e 2292 case REG_TM:
65c5f2a6
ILT
2293 /* These types of register notes use an INSN_LIST rather than an
2294 EXPR_LIST, so that copying is done right and dumps look
2295 better. */
efc0b2bd 2296 note = alloc_INSN_LIST (datum, list);
65c5f2a6
ILT
2297 PUT_REG_NOTE_KIND (note, kind);
2298 break;
2299
2300 default:
efc0b2bd 2301 note = alloc_EXPR_LIST (kind, datum, list);
65c5f2a6
ILT
2302 break;
2303 }
2304
efc0b2bd
ILT
2305 return note;
2306}
2307
2308/* Add register note with kind KIND and datum DATUM to INSN. */
2309
2310void
2311add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2312{
2313 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
65c5f2a6
ILT
2314}
2315
e5af9ddd
RS
2316/* Add an integer register note with kind KIND and datum DATUM to INSN. */
2317
2318void
2319add_int_reg_note (rtx insn, enum reg_note kind, int datum)
2320{
2321 gcc_checking_assert (int_reg_note_p (kind));
ef4bddc2 2322 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
e5af9ddd
RS
2323 datum, REG_NOTES (insn));
2324}
2325
2326/* Add a register note like NOTE to INSN. */
2327
2328void
9b8d3c60 2329add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
e5af9ddd
RS
2330{
2331 if (GET_CODE (note) == INT_LIST)
2332 add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2333 else
2334 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2335}
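/* Illustrative sketch, not part of the original file: the two entry
   points above serve the two note representations.  A REG_EQUAL note
   carries an rtx datum, while REG_BR_PROB (the only integer note kind
   per int_reg_note_p above) carries a plain int.  The helper names
   below are hypothetical.  */

static void
record_equal_value (rtx_insn *insn, rtx value)
{
  add_reg_note (insn, REG_EQUAL, value);
}

static void
record_branch_probability (rtx_insn *insn, int prob)
{
  add_int_reg_note (insn, REG_BR_PROB, prob);
}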
2336
2c88418c
RS
2337/* Remove register note NOTE from the REG_NOTES of INSN. */
2338
2339void
f7d504c2 2340remove_note (rtx insn, const_rtx note)
2c88418c 2341{
b3694847 2342 rtx link;
2c88418c 2343
49c3bb12
RH
2344 if (note == NULL_RTX)
2345 return;
2346
2c88418c 2347 if (REG_NOTES (insn) == note)
6fb5fa3c
DB
2348 REG_NOTES (insn) = XEXP (note, 1);
2349 else
2350 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2351 if (XEXP (link, 1) == note)
2352 {
2353 XEXP (link, 1) = XEXP (note, 1);
2354 break;
2355 }
2356
2357 switch (REG_NOTE_KIND (note))
2c88418c 2358 {
6fb5fa3c
DB
2359 case REG_EQUAL:
2360 case REG_EQUIV:
b2908ba6 2361 df_notes_rescan (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
2362 break;
2363 default:
2364 break;
2c88418c 2365 }
2c88418c 2366}
55a98783 2367
7cd689bc
SB
2368/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
2369
2370void
43534595 2371remove_reg_equal_equiv_notes (rtx_insn *insn)
7cd689bc
SB
2372{
2373 rtx *loc;
2374
2375 loc = &REG_NOTES (insn);
2376 while (*loc)
2377 {
2378 enum reg_note kind = REG_NOTE_KIND (*loc);
2379 if (kind == REG_EQUAL || kind == REG_EQUIV)
2380 *loc = XEXP (*loc, 1);
2381 else
2382 loc = &XEXP (*loc, 1);
2383 }
2384}
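/* Illustrative sketch, not part of the original file: the
   pointer-to-pointer walk above generalizes to deleting every note of
   an arbitrary kind without restarting the scan.  The name
   remove_notes_of_kind is hypothetical.  */

static void
remove_notes_of_kind (rtx_insn *insn, enum reg_note kind)
{
  rtx *loc = &REG_NOTES (insn);
  while (*loc)
    {
      if (REG_NOTE_KIND (*loc) == kind)
	*loc = XEXP (*loc, 1);
      else
	loc = &XEXP (*loc, 1);
    }
}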
885c9b5d
EB
2385
2386/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2387
2388void
2389remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2390{
2391 df_ref eq_use;
2392
2393 if (!df)
2394 return;
2395
2396 /* This loop is a little tricky. We cannot just go down the chain because
2397 it is being modified by some actions in the loop. So we just iterate
2398 over the head. We plan to drain the list anyway. */
2399 while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2400 {
1bbbc4a3 2401 rtx_insn *insn = DF_REF_INSN (eq_use);
885c9b5d
EB
2402 rtx note = find_reg_equal_equiv_note (insn);
2403
2404 /* This assert is generally triggered when someone deletes a REG_EQUAL
2405 or REG_EQUIV note by hacking the list manually rather than calling
2406 remove_note. */
2407 gcc_assert (note);
2408
2409 remove_note (insn, note);
2410 }
2411}
7cd689bc 2412
5f0d2358
RK
2413/* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2414 return 1 if it is found. A simple equality test is used to determine if
2415 NODE matches. */
2416
0d53e74e
TS
2417bool
2418in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
5f0d2358 2419{
f7d504c2 2420 const_rtx x;
5f0d2358
RK
2421
2422 for (x = listp; x; x = XEXP (x, 1))
2423 if (node == XEXP (x, 0))
0d53e74e 2424 return true;
5f0d2358 2425
0d53e74e 2426 return false;
5f0d2358
RK
2427}
2428
dd248abd
RK
2429/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2430 remove that entry from the list if it is found.
55a98783 2431
dd248abd 2432 A simple equality test is used to determine if NODE matches. */
55a98783
JL
2433
2434void
2382940b 2435remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
55a98783 2436{
2382940b 2437 rtx_expr_list *temp = *listp;
e67d1102 2438 rtx_expr_list *prev = NULL;
55a98783
JL
2439
2440 while (temp)
2441 {
2382940b 2442 if (node == temp->element ())
55a98783
JL
2443 {
2444 /* Splice the node out of the list. */
2445 if (prev)
2382940b 2446 XEXP (prev, 1) = temp->next ();
55a98783 2447 else
2382940b 2448 *listp = temp->next ();
55a98783
JL
2449
2450 return;
2451 }
dd248abd
RK
2452
2453 prev = temp;
2382940b 2454 temp = temp->next ();
55a98783
JL
2455 }
2456}
b5241a5a
DM
2457
2458/* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2459 remove that entry from the list if it is found.
2460
2461 A simple equality test is used to determine if NODE matches. */
2462
2463void
2464remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2465{
2466 rtx_insn_list *temp = *listp;
e67d1102 2467 rtx_insn_list *prev = NULL;
b5241a5a
DM
2468
2469 while (temp)
2470 {
2471 if (node == temp->insn ())
2472 {
2473 /* Splice the node out of the list. */
2474 if (prev)
2475 XEXP (prev, 1) = temp->next ();
2476 else
2477 *listp = temp->next ();
2478
2479 return;
2480 }
2481
2482 prev = temp;
2483 temp = temp->next ();
2484 }
2485}
2c88418c 2486\f
2b067faf
RS
2487/* Nonzero if X contains any volatile instructions. These are instructions
2488   which may cause unpredictable machine state, and thus no
adddc347
HPN
2489 instructions or register uses should be moved or combined across them.
2490 This includes only volatile asms and UNSPEC_VOLATILE instructions. */
2b067faf
RS
2491
2492int
f7d504c2 2493volatile_insn_p (const_rtx x)
2b067faf 2494{
f7d504c2 2495 const RTX_CODE code = GET_CODE (x);
2b067faf
RS
2496 switch (code)
2497 {
2498 case LABEL_REF:
2499 case SYMBOL_REF:
2b067faf 2500 case CONST:
d8116890 2501 CASE_CONST_ANY:
2b067faf
RS
2502 case CC0:
2503 case PC:
2504 case REG:
2505 case SCRATCH:
2506 case CLOBBER:
2b067faf
RS
2507 case ADDR_VEC:
2508 case ADDR_DIFF_VEC:
2509 case CALL:
2510 case MEM:
2511 return 0;
2512
2513 case UNSPEC_VOLATILE:
2b067faf
RS
2514 return 1;
2515
4c46ea23 2516 case ASM_INPUT:
2b067faf
RS
2517 case ASM_OPERANDS:
2518 if (MEM_VOLATILE_P (x))
2519 return 1;
e9a25f70
JL
2520
2521 default:
2522 break;
2b067faf
RS
2523 }
2524
2525 /* Recursively scan the operands of this expression. */
2526
2527 {
f7d504c2 2528 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2529 int i;
a6a2274a 2530
2b067faf
RS
2531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2532 {
2533 if (fmt[i] == 'e')
2534 {
31001f72 2535 if (volatile_insn_p (XEXP (x, i)))
2b067faf
RS
2536 return 1;
2537 }
d4757e6a 2538 else if (fmt[i] == 'E')
2b067faf 2539 {
b3694847 2540 int j;
2b067faf 2541 for (j = 0; j < XVECLEN (x, i); j++)
31001f72 2542 if (volatile_insn_p (XVECEXP (x, i, j)))
2b067faf
RS
2543 return 1;
2544 }
2545 }
2546 }
2547 return 0;
2548}
2549
2c88418c 2550/* Nonzero if X contains any volatile memory references,
2ac4fed0 2551 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2c88418c
RS
2552
2553int
f7d504c2 2554volatile_refs_p (const_rtx x)
2c88418c 2555{
f7d504c2 2556 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2557 switch (code)
2558 {
2559 case LABEL_REF:
2560 case SYMBOL_REF:
2c88418c 2561 case CONST:
d8116890 2562 CASE_CONST_ANY:
2c88418c
RS
2563 case CC0:
2564 case PC:
2565 case REG:
2566 case SCRATCH:
2567 case CLOBBER:
2c88418c
RS
2568 case ADDR_VEC:
2569 case ADDR_DIFF_VEC:
2570 return 0;
2571
2ac4fed0 2572 case UNSPEC_VOLATILE:
2c88418c
RS
2573 return 1;
2574
2575 case MEM:
4c46ea23 2576 case ASM_INPUT:
2c88418c
RS
2577 case ASM_OPERANDS:
2578 if (MEM_VOLATILE_P (x))
2579 return 1;
e9a25f70
JL
2580
2581 default:
2582 break;
2c88418c
RS
2583 }
2584
2585 /* Recursively scan the operands of this expression. */
2586
2587 {
f7d504c2 2588 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2589 int i;
a6a2274a 2590
2c88418c
RS
2591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2592 {
2593 if (fmt[i] == 'e')
2594 {
2595 if (volatile_refs_p (XEXP (x, i)))
2596 return 1;
2597 }
d4757e6a 2598 else if (fmt[i] == 'E')
2c88418c 2599 {
b3694847 2600 int j;
2c88418c
RS
2601 for (j = 0; j < XVECLEN (x, i); j++)
2602 if (volatile_refs_p (XVECEXP (x, i, j)))
2603 return 1;
2604 }
2605 }
2606 }
2607 return 0;
2608}
2609
2610/* Similar to above, except that it also rejects register pre- and post-
2611 incrementing. */
2612
2613int
f7d504c2 2614side_effects_p (const_rtx x)
2c88418c 2615{
f7d504c2 2616 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2617 switch (code)
2618 {
2619 case LABEL_REF:
2620 case SYMBOL_REF:
2c88418c 2621 case CONST:
d8116890 2622 CASE_CONST_ANY:
2c88418c
RS
2623 case CC0:
2624 case PC:
2625 case REG:
2626 case SCRATCH:
2c88418c
RS
2627 case ADDR_VEC:
2628 case ADDR_DIFF_VEC:
b5b8b0ac 2629 case VAR_LOCATION:
2c88418c
RS
2630 return 0;
2631
2632 case CLOBBER:
2633 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2634 when some combination can't be done. If we see one, don't think
2635 that we can simplify the expression. */
2636 return (GET_MODE (x) != VOIDmode);
2637
2638 case PRE_INC:
2639 case PRE_DEC:
2640 case POST_INC:
2641 case POST_DEC:
1fb9c5cd
MH
2642 case PRE_MODIFY:
2643 case POST_MODIFY:
2c88418c 2644 case CALL:
2ac4fed0 2645 case UNSPEC_VOLATILE:
2c88418c
RS
2646 return 1;
2647
2648 case MEM:
4c46ea23 2649 case ASM_INPUT:
2c88418c
RS
2650 case ASM_OPERANDS:
2651 if (MEM_VOLATILE_P (x))
2652 return 1;
e9a25f70
JL
2653
2654 default:
2655 break;
2c88418c
RS
2656 }
2657
2658 /* Recursively scan the operands of this expression. */
2659
2660 {
b3694847
SS
2661 const char *fmt = GET_RTX_FORMAT (code);
2662 int i;
a6a2274a 2663
2c88418c
RS
2664 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2665 {
2666 if (fmt[i] == 'e')
2667 {
2668 if (side_effects_p (XEXP (x, i)))
2669 return 1;
2670 }
d4757e6a 2671 else if (fmt[i] == 'E')
2c88418c 2672 {
b3694847 2673 int j;
2c88418c
RS
2674 for (j = 0; j < XVECLEN (x, i); j++)
2675 if (side_effects_p (XVECEXP (x, i, j)))
2676 return 1;
2677 }
2678 }
2679 }
2680 return 0;
2681}
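/* Illustrative sketch, not part of the original file: the three
   predicates above form a hierarchy.  Every volatile insn contains
   volatile refs, and every volatile ref is a side effect, but not
   conversely: a plain (mem:SI (reg)) has no side effects, while
   (post_inc:SI (reg)) is a side effect that is not a volatile
   reference.  A pass deciding whether an expression may safely be
   duplicated or deleted might combine side_effects_p with may_trap_p
   (defined below); the name safe_to_duplicate_p is hypothetical.  */

static bool
safe_to_duplicate_p (const_rtx x)
{
  return !side_effects_p (x) && !may_trap_p (x);
}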
2682\f
e755fcf5 2683/* Return nonzero if evaluating rtx X might cause a trap.
48e8382e
PB
2684   FLAGS controls how to consider MEMs.  A nonzero value means the context
2685 of the access may have changed from the original, such that the
2686 address may have become invalid. */
2c88418c 2687
215b063c 2688int
f7d504c2 2689may_trap_p_1 (const_rtx x, unsigned flags)
2c88418c
RS
2690{
2691 int i;
2692 enum rtx_code code;
6f7d635c 2693 const char *fmt;
48e8382e
PB
2694
2695 /* We make no distinction currently, but this function is part of
2696 the internal target-hooks ABI so we keep the parameter as
2697 "unsigned flags". */
2698 bool code_changed = flags != 0;
2c88418c
RS
2699
2700 if (x == 0)
2701 return 0;
2702 code = GET_CODE (x);
2703 switch (code)
2704 {
2705 /* Handle these cases quickly. */
d8116890 2706 CASE_CONST_ANY:
2c88418c
RS
2707 case SYMBOL_REF:
2708 case LABEL_REF:
2709 case CONST:
2710 case PC:
2711 case CC0:
2712 case REG:
2713 case SCRATCH:
2714 return 0;
2715
215b063c 2716 case UNSPEC:
215b063c
PB
2717 return targetm.unspec_may_trap_p (x, flags);
2718
c84a808e 2719 case UNSPEC_VOLATILE:
215b063c 2720 case ASM_INPUT:
2c88418c
RS
2721 case TRAP_IF:
2722 return 1;
2723
22aa60a1
RH
2724 case ASM_OPERANDS:
2725 return MEM_VOLATILE_P (x);
2726
2c88418c
RS
2727 /* Memory ref can trap unless it's a static var or a stack slot. */
2728 case MEM:
d809253a
EB
2729 /* Recognize specific pattern of stack checking probes. */
2730 if (flag_stack_check
2731 && MEM_VOLATILE_P (x)
2732 && XEXP (x, 0) == stack_pointer_rtx)
2733 return 1;
e755fcf5 2734 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
48e8382e
PB
2735 reference; moving it out of context such as when moving code
2736 when optimizing, might cause its address to become invalid. */
2737 code_changed
2738 || !MEM_NOTRAP_P (x))
2739 {
f5541398 2740 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
48e8382e
PB
2741 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2742 GET_MODE (x), code_changed);
2743 }
2744
2745 return 0;
2c88418c
RS
2746
2747 /* Division by a non-constant might trap. */
2748 case DIV:
2749 case MOD:
2750 case UDIV:
2751 case UMOD:
3d3dbadd 2752 if (HONOR_SNANS (x))
52bfebf0 2753 return 1;
3d8bf70f 2754 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
f9013075
DE
2755 return flag_trapping_math;
2756 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2c88418c 2757 return 1;
e9a25f70
JL
2758 break;
2759
b278301b
RK
2760 case EXPR_LIST:
2761 /* An EXPR_LIST is used to represent a function call. This
2762 certainly may trap. */
2763 return 1;
e9a25f70 2764
734508ea
JW
2765 case GE:
2766 case GT:
2767 case LE:
2768 case LT:
19aec195 2769 case LTGT:
55143861 2770 case COMPARE:
734508ea 2771 /* Some floating point comparisons may trap. */
f5eb5fd0
JH
2772 if (!flag_trapping_math)
2773 break;
734508ea
JW
2774 /* ??? There is no machine independent way to check for tests that trap
2775 when COMPARE is used, though many targets do make this distinction.
2776 For instance, sparc uses CCFPE for compares which generate exceptions
2777 and CCFP for compares which do not generate exceptions. */
1b457aa4 2778 if (HONOR_NANS (x))
55143861
JJ
2779 return 1;
2780 /* But often the compare has some CC mode, so check operand
2781 modes as well. */
1b457aa4
MG
2782 if (HONOR_NANS (XEXP (x, 0))
2783 || HONOR_NANS (XEXP (x, 1)))
52bfebf0
RS
2784 return 1;
2785 break;
2786
2787 case EQ:
2788 case NE:
3d3dbadd 2789 if (HONOR_SNANS (x))
52bfebf0
RS
2790 return 1;
2791 /* Often comparison is CC mode, so check operand modes. */
3d3dbadd
MG
2792 if (HONOR_SNANS (XEXP (x, 0))
2793 || HONOR_SNANS (XEXP (x, 1)))
55143861
JJ
2794 return 1;
2795 break;
2796
22fd5743
FH
2797 case FIX:
2798 /* Conversion of floating point might trap. */
1b457aa4 2799 if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
22fd5743
FH
2800 return 1;
2801 break;
2802
05cc23e8
RH
2803 case NEG:
2804 case ABS:
e3947b34 2805 case SUBREG:
05cc23e8
RH
2806 /* These operations don't trap even with floating point. */
2807 break;
2808
2c88418c
RS
2809 default:
2810 /* Any floating arithmetic may trap. */
c84a808e 2811 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
2c88418c
RS
2812 return 1;
2813 }
2814
2815 fmt = GET_RTX_FORMAT (code);
2816 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2817 {
2818 if (fmt[i] == 'e')
2819 {
e755fcf5 2820 if (may_trap_p_1 (XEXP (x, i), flags))
2c88418c
RS
2821 return 1;
2822 }
2823 else if (fmt[i] == 'E')
2824 {
b3694847 2825 int j;
2c88418c 2826 for (j = 0; j < XVECLEN (x, i); j++)
e755fcf5 2827 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2c88418c
RS
2828 return 1;
2829 }
2830 }
2831 return 0;
2832}
2358ff91
EB
2833
2834/* Return nonzero if evaluating rtx X might cause a trap. */
2835
2836int
f7d504c2 2837may_trap_p (const_rtx x)
2358ff91 2838{
e755fcf5
ZD
2839 return may_trap_p_1 (x, 0);
2840}
2841
c0220ea4 2842/* Same as above, but additionally return nonzero if evaluating rtx X might
2358ff91
EB
2843   cause a fault.  We define a fault for the purpose of this function as an
2844 erroneous execution condition that cannot be encountered during the normal
2845 execution of a valid program; the typical example is an unaligned memory
2846 access on a strict alignment machine. The compiler guarantees that it
2847 doesn't generate code that will fault from a valid program, but this
2848 guarantee doesn't mean anything for individual instructions. Consider
2849 the following example:
2850
2851 struct S { int d; union { char *cp; int *ip; }; };
2852
2853 int foo(struct S *s)
2854 {
2855 if (s->d == 1)
2856 return *s->ip;
2857 else
2858 return *s->cp;
2859 }
2860
2861 on a strict alignment machine. In a valid program, foo will never be
2862 invoked on a structure for which d is equal to 1 and the underlying
2863 unique field of the union not aligned on a 4-byte boundary, but the
2864 expression *s->ip might cause a fault if considered individually.
2865
2866 At the RTL level, potentially problematic expressions will almost always
2867   satisfy may_trap_p; for example, the above dereference can be emitted as
2868 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2869 However, suppose that foo is inlined in a caller that causes s->cp to
2870 point to a local character variable and guarantees that s->d is not set
2871 to 1; foo may have been effectively translated into pseudo-RTL as:
2872
2873 if ((reg:SI) == 1)
2874 (set (reg:SI) (mem:SI (%fp - 7)))
2875 else
2876 (set (reg:QI) (mem:QI (%fp - 7)))
2877
2878 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2879 memory reference to a stack slot, but it will certainly cause a fault
2880 on a strict alignment machine. */
2881
2882int
f7d504c2 2883may_trap_or_fault_p (const_rtx x)
2358ff91 2884{
48e8382e 2885 return may_trap_p_1 (x, 1);
2358ff91 2886}
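/* Illustrative sketch, not part of the original file: code motion
   passes typically gate speculative hoisting of a SET's source on the
   stronger predicate, since the moved code may execute in a context
   the programmer never arranged for.  The name may_hoist_set_src_p is
   hypothetical.  */

static bool
may_hoist_set_src_p (const_rtx set)
{
  return (!may_trap_or_fault_p (SET_SRC (set))
	  && !side_effects_p (SET_SRC (set)));
}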
2c88418c
RS
2887\f
2888/* Return nonzero if X contains a comparison that is not either EQ or NE,
2889 i.e., an inequality. */
2890
2891int
f7d504c2 2892inequality_comparisons_p (const_rtx x)
2c88418c 2893{
b3694847
SS
2894 const char *fmt;
2895 int len, i;
f7d504c2 2896 const enum rtx_code code = GET_CODE (x);
2c88418c
RS
2897
2898 switch (code)
2899 {
2900 case REG:
2901 case SCRATCH:
2902 case PC:
2903 case CC0:
d8116890 2904 CASE_CONST_ANY:
2c88418c
RS
2905 case CONST:
2906 case LABEL_REF:
2907 case SYMBOL_REF:
2908 return 0;
2909
2910 case LT:
2911 case LTU:
2912 case GT:
2913 case GTU:
2914 case LE:
2915 case LEU:
2916 case GE:
2917 case GEU:
2918 return 1;
a6a2274a 2919
e9a25f70
JL
2920 default:
2921 break;
2c88418c
RS
2922 }
2923
2924 len = GET_RTX_LENGTH (code);
2925 fmt = GET_RTX_FORMAT (code);
2926
2927 for (i = 0; i < len; i++)
2928 {
2929 if (fmt[i] == 'e')
2930 {
2931 if (inequality_comparisons_p (XEXP (x, i)))
2932 return 1;
2933 }
2934 else if (fmt[i] == 'E')
2935 {
b3694847 2936 int j;
2c88418c
RS
2937 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2938 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2939 return 1;
2940 }
2941 }
a6a2274a 2942
2c88418c
RS
2943 return 0;
2944}
2945\f
1ed0205e
VM
2946/* Replace any occurrence of FROM in X with TO. The function does
2947   not descend into CONST_DOUBLEs when replacing.
2c88418c
RS
2948
2949 Note that copying is not done so X must not be shared unless all copies
2950 are to be modified. */
2951
2952rtx
0c20a65f 2953replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2954{
b3694847
SS
2955 int i, j;
2956 const char *fmt;
2c88418c
RS
2957
2958 if (x == from)
2959 return to;
2960
2961 /* Allow this function to make replacements in EXPR_LISTs. */
2962 if (x == 0)
2963 return 0;
2964
9dd791c8
AO
2965 if (GET_CODE (x) == SUBREG)
2966 {
55d796da 2967 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
9dd791c8 2968
481683e1 2969 if (CONST_INT_P (new_rtx))
9dd791c8 2970 {
55d796da 2971 x = simplify_subreg (GET_MODE (x), new_rtx,
9dd791c8
AO
2972 GET_MODE (SUBREG_REG (x)),
2973 SUBREG_BYTE (x));
41374e13 2974 gcc_assert (x);
9dd791c8
AO
2975 }
2976 else
55d796da 2977 SUBREG_REG (x) = new_rtx;
9dd791c8
AO
2978
2979 return x;
2980 }
2981 else if (GET_CODE (x) == ZERO_EXTEND)
2982 {
55d796da 2983 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
9dd791c8 2984
481683e1 2985 if (CONST_INT_P (new_rtx))
9dd791c8
AO
2986 {
2987 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
55d796da 2988 new_rtx, GET_MODE (XEXP (x, 0)));
41374e13 2989 gcc_assert (x);
9dd791c8
AO
2990 }
2991 else
55d796da 2992 XEXP (x, 0) = new_rtx;
9dd791c8
AO
2993
2994 return x;
2995 }
2996
2c88418c
RS
2997 fmt = GET_RTX_FORMAT (GET_CODE (x));
2998 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2999 {
3000 if (fmt[i] == 'e')
3001 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
3002 else if (fmt[i] == 'E')
3003 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3004 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
3005 }
3006
3007 return x;
a6a2274a 3008}
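/* Illustrative sketch, not part of the original file: replace_rtx
   modifies X in place rather than copying, so callers typically copy
   a possibly-shared expression first.  The name replace_rtx_in_copy
   is hypothetical.  */

static rtx
replace_rtx_in_copy (rtx x, rtx from, rtx to)
{
  return replace_rtx (copy_rtx (x), from, to);
}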
2c88418c 3009\f
a2b7026c
RS
3010/* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track
3011 the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
39811184 3012
a2b7026c
RS
3013void
3014replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
39811184 3015{
a2b7026c
RS
3016 /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
3017 rtx x = *loc;
3018 if (JUMP_TABLE_DATA_P (x))
4af16369 3019 {
a2b7026c
RS
3020 x = PATTERN (x);
3021 rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
3022 int len = GET_NUM_ELEM (vec);
3023 for (int i = 0; i < len; ++i)
4af16369 3024 {
a2b7026c
RS
3025 rtx ref = RTVEC_ELT (vec, i);
3026 if (XEXP (ref, 0) == old_label)
3027 {
3028 XEXP (ref, 0) = new_label;
3029 if (update_label_nuses)
3030 {
3031 ++LABEL_NUSES (new_label);
3032 --LABEL_NUSES (old_label);
3033 }
3034 }
4af16369 3035 }
a2b7026c 3036 return;
4af16369
JZ
3037 }
3038
39811184 3039 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
a2b7026c 3040 field. This is not handled by the iterator because it doesn't
39811184 3041 handle unprinted ('0') fields. */
a2b7026c
RS
3042 if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
3043 JUMP_LABEL (x) = new_label;
39811184 3044
a2b7026c
RS
3045 subrtx_ptr_iterator::array_type array;
3046 FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
4af16369 3047 {
a2b7026c
RS
3048 rtx *loc = *iter;
3049 if (rtx x = *loc)
4af16369 3050 {
a2b7026c
RS
3051 if (GET_CODE (x) == SYMBOL_REF
3052 && CONSTANT_POOL_ADDRESS_P (x))
3053 {
3054 rtx c = get_pool_constant (x);
3055 if (rtx_referenced_p (old_label, c))
3056 {
3057 /* Create a copy of constant C; replace the label inside
3058 but do not update LABEL_NUSES because uses in constant pool
3059 are not counted. */
3060 rtx new_c = copy_rtx (c);
3061 replace_label (&new_c, old_label, new_label, false);
3062
3063 /* Add the new constant NEW_C to constant pool and replace
3064 the old reference to constant by new reference. */
3065 rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
3066 *loc = replace_rtx (x, x, XEXP (new_mem, 0));
3067 }
3068 }
3069
3070 if ((GET_CODE (x) == LABEL_REF
3071 || GET_CODE (x) == INSN_LIST)
3072 && XEXP (x, 0) == old_label)
3073 {
3074 XEXP (x, 0) = new_label;
3075 if (update_label_nuses)
3076 {
3077 ++LABEL_NUSES (new_label);
3078 --LABEL_NUSES (old_label);
3079 }
3080 }
4af16369 3081 }
4af16369 3082 }
a2b7026c 3083}
39811184 3084
a2b7026c
RS
3085void
3086replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label,
3087 bool update_label_nuses)
3088{
3089 rtx insn_as_rtx = insn;
3090 replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
3091 gcc_checking_assert (insn_as_rtx == insn);
39811184
JZ
3092}
3093
e08cf836 3094/* Return true if X is referenced in BODY. */
39811184 3095
e08cf836
RS
3096bool
3097rtx_referenced_p (const_rtx x, const_rtx body)
39811184 3098{
e08cf836
RS
3099 subrtx_iterator::array_type array;
3100 FOR_EACH_SUBRTX (iter, array, body, ALL)
3101 if (const_rtx y = *iter)
3102 {
3103 /* Check if a label_ref Y refers to label X. */
a827d9b1
DM
3104 if (GET_CODE (y) == LABEL_REF
3105 && LABEL_P (x)
3106 && LABEL_REF_LABEL (y) == x)
e08cf836 3107 return true;
39811184 3108
e08cf836
RS
3109 if (rtx_equal_p (x, y))
3110 return true;
39811184 3111
e08cf836
RS
3112 /* If Y is a reference to pool constant traverse the constant. */
3113 if (GET_CODE (y) == SYMBOL_REF
3114 && CONSTANT_POOL_ADDRESS_P (y))
3115 iter.substitute (get_pool_constant (y));
3116 }
3117 return false;
39811184
JZ
3118}
3119
ee735eef
JZ
3120/* If INSN is a tablejump return true and store the label (before jump table) to
3121 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
39811184
JZ
3122
3123bool
c5241a21 3124tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
39811184 3125{
1476d1bd
MM
3126 rtx label;
3127 rtx_insn *table;
ee735eef 3128
dc0ff1c8
BS
3129 if (!JUMP_P (insn))
3130 return false;
3131
3132 label = JUMP_LABEL (insn);
3133 if (label != NULL_RTX && !ANY_RETURN_P (label)
b32d5189 3134 && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
481683e1 3135 && JUMP_TABLE_DATA_P (table))
39811184 3136 {
ee735eef
JZ
3137 if (labelp)
3138 *labelp = label;
3139 if (tablep)
8942ee0f 3140 *tablep = as_a <rtx_jump_table_data *> (table);
39811184
JZ
3141 return true;
3142 }
3143 return false;
3144}
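/* Illustrative sketch, not part of the original file: typical use of
   tablejump_p, fetching the dispatch table in order to inspect its
   entries.  The vector of labels is operand 0 of an ADDR_VEC and
   operand 1 of an ADDR_DIFF_VEC, the same idiom replace_label uses
   above.  The name count_jump_table_targets is hypothetical.  */

static int
count_jump_table_targets (const rtx_insn *insn)
{
  rtx_jump_table_data *table;
  if (!tablejump_p (insn, NULL, &table))
    return 0;
  rtx body = PATTERN (table);
  return XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
}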
3145
fce7e199
RH
3146/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
3147 constant that is not in the constant pool and not in the condition
3148 of an IF_THEN_ELSE. */
2a1777af
JL
3149
3150static int
f7d504c2 3151computed_jump_p_1 (const_rtx x)
2a1777af 3152{
f7d504c2 3153 const enum rtx_code code = GET_CODE (x);
2a1777af 3154 int i, j;
6f7d635c 3155 const char *fmt;
2a1777af
JL
3156
3157 switch (code)
3158 {
2a1777af
JL
3159 case LABEL_REF:
3160 case PC:
3161 return 0;
3162
fce7e199 3163 case CONST:
d8116890 3164 CASE_CONST_ANY:
fce7e199 3165 case SYMBOL_REF:
2a1777af
JL
3166 case REG:
3167 return 1;
3168
3169 case MEM:
3170 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3171 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
3172
3173 case IF_THEN_ELSE:
fce7e199
RH
3174 return (computed_jump_p_1 (XEXP (x, 1))
3175 || computed_jump_p_1 (XEXP (x, 2)));
1d300e19
KG
3176
3177 default:
3178 break;
2a1777af
JL
3179 }
3180
3181 fmt = GET_RTX_FORMAT (code);
3182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3183 {
3184 if (fmt[i] == 'e'
fce7e199 3185 && computed_jump_p_1 (XEXP (x, i)))
2a1777af
JL
3186 return 1;
3187
d4757e6a 3188 else if (fmt[i] == 'E')
2a1777af 3189 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 3190 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2a1777af
JL
3191 return 1;
3192 }
3193
3194 return 0;
3195}
3196
3197/* Return nonzero if INSN is an indirect jump (aka computed jump).
3198
3199 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 3200 we can recognize them by a (use (label_ref)). */
2a1777af
JL
3201
3202int
63bd6324 3203computed_jump_p (const rtx_insn *insn)
2a1777af
JL
3204{
3205 int i;
4b4bf941 3206 if (JUMP_P (insn))
2a1777af
JL
3207 {
3208 rtx pat = PATTERN (insn);
2a1777af 3209
cf7c4aa6
HPN
3210 /* If we have a JUMP_LABEL set, we're not a computed jump. */
3211 if (JUMP_LABEL (insn) != NULL)
f759eb8b 3212 return 0;
cf7c4aa6
HPN
3213
3214 if (GET_CODE (pat) == PARALLEL)
2a1777af
JL
3215 {
3216 int len = XVECLEN (pat, 0);
3217 int has_use_labelref = 0;
3218
3219 for (i = len - 1; i >= 0; i--)
3220 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3221 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3222 == LABEL_REF))
c7b3b99f
PCC
3223 {
3224 has_use_labelref = 1;
3225 break;
3226 }
2a1777af
JL
3227
3228 if (! has_use_labelref)
3229 for (i = len - 1; i >= 0; i--)
3230 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3231 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 3232 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2a1777af
JL
3233 return 1;
3234 }
3235 else if (GET_CODE (pat) == SET
3236 && SET_DEST (pat) == pc_rtx
fce7e199 3237 && computed_jump_p_1 (SET_SRC (pat)))
2a1777af
JL
3238 return 1;
3239 }
3240 return 0;
3241}
ccc2d6d0 3242
4deef538
AO
3243\f
3244
8d8e205b
RS
3245/* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3246 the equivalent add insn and pass the result to FN, using DATA as the
3247 final argument. */
4deef538
AO
3248
3249static int
8d8e205b 3250for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
4deef538 3251{
8d8e205b 3252 rtx x = XEXP (mem, 0);
4deef538
AO
3253 switch (GET_CODE (x))
3254 {
3255 case PRE_INC:
3256 case POST_INC:
3257 {
8d8e205b 3258 int size = GET_MODE_SIZE (GET_MODE (mem));
4deef538
AO
3259 rtx r1 = XEXP (x, 0);
3260 rtx c = gen_int_mode (size, GET_MODE (r1));
8d8e205b 3261 return fn (mem, x, r1, r1, c, data);
4deef538
AO
3262 }
3263
3264 case PRE_DEC:
3265 case POST_DEC:
3266 {
8d8e205b 3267 int size = GET_MODE_SIZE (GET_MODE (mem));
4deef538
AO
3268 rtx r1 = XEXP (x, 0);
3269 rtx c = gen_int_mode (-size, GET_MODE (r1));
8d8e205b 3270 return fn (mem, x, r1, r1, c, data);
4deef538
AO
3271 }
3272
3273 case PRE_MODIFY:
3274 case POST_MODIFY:
3275 {
3276 rtx r1 = XEXP (x, 0);
3277 rtx add = XEXP (x, 1);
8d8e205b 3278 return fn (mem, x, r1, add, NULL, data);
4deef538
AO
3279 }
3280
3281 default:
8d8e205b 3282 gcc_unreachable ();
4deef538
AO
3283 }
3284}
3285
8d8e205b
RS
3286/* Traverse *LOC looking for MEMs that have autoinc addresses.
3287 For each such autoinc operation found, call FN, passing it
4deef538
AO
3288 the innermost enclosing MEM, the operation itself, the RTX modified
3289 by the operation, two RTXs (the second may be NULL) that, once
3290 added, represent the value to be held by the modified RTX
8d8e205b
RS
3291 afterwards, and DATA. FN is to return 0 to continue the
3292 traversal or any other value to have it returned to the caller of
4deef538
AO
3293 for_each_inc_dec. */
3294
3295int
8d8e205b 3296for_each_inc_dec (rtx x,
4deef538 3297 for_each_inc_dec_fn fn,
8d8e205b 3298 void *data)
4deef538 3299{
8d8e205b
RS
3300 subrtx_var_iterator::array_type array;
3301 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3302 {
3303 rtx mem = *iter;
3304 if (mem
3305 && MEM_P (mem)
3306 && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3307 {
3308 int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3309 if (res != 0)
3310 return res;
3311 iter.skip_subrtxes ();
3312 }
3313 }
3314 return 0;
4deef538
AO
3315}
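/* Illustrative sketch, not part of the original file: a
   for_each_inc_dec callback that merely counts autoinc addresses;
   returning 0 continues the traversal, per the comment above.  The
   names count_autoinc_1 and count_autoincs are hypothetical.  */

static int
count_autoinc_1 (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		 rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
		 rtx srcoff ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return 0;
}

static int
count_autoincs (rtx x)
{
  int count = 0;
  for_each_inc_dec (x, count_autoinc_1, &count);
  return count;
}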
3316
3317\f
777b1b71
RH
3318/* Searches X for any reference to REGNO, returning the rtx of the
3319 reference found if any. Otherwise, returns NULL_RTX. */
3320
3321rtx
0c20a65f 3322regno_use_in (unsigned int regno, rtx x)
777b1b71 3323{
b3694847 3324 const char *fmt;
777b1b71
RH
3325 int i, j;
3326 rtx tem;
3327
f8cfc6aa 3328 if (REG_P (x) && REGNO (x) == regno)
777b1b71
RH
3329 return x;
3330
3331 fmt = GET_RTX_FORMAT (GET_CODE (x));
3332 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3333 {
3334 if (fmt[i] == 'e')
3335 {
3336 if ((tem = regno_use_in (regno, XEXP (x, i))))
3337 return tem;
3338 }
3339 else if (fmt[i] == 'E')
3340 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3341 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3342 return tem;
3343 }
3344
3345 return NULL_RTX;
3346}
2dfa9a87 3347
e5c56fd9 3348/* Return a value indicating whether OP, an operand of a commutative
350911e6
AM
3349 operation, is preferred as the first or second operand. The more
3350 positive the value, the stronger the preference for being the first
3351 operand. */
e5c56fd9 3352
9b3bd424 3353int
0c20a65f 3354commutative_operand_precedence (rtx op)
e5c56fd9 3355{
e3d6e740 3356 enum rtx_code code = GET_CODE (op);
b8698a0f 3357
350911e6 3358 /* Constants always become the second operand. Prefer "nice" constants. */
e3d6e740 3359 if (code == CONST_INT)
7e0b4eae 3360 return -8;
807e902e
KZ
3361 if (code == CONST_WIDE_INT)
3362 return -8;
e3d6e740 3363 if (code == CONST_DOUBLE)
7e0b4eae 3364 return -7;
091a3ac7
CF
3365 if (code == CONST_FIXED)
3366 return -7;
9ce79a7a 3367 op = avoid_constant_pool_reference (op);
79b82df3 3368 code = GET_CODE (op);
ec8e098d
PB
3369
3370 switch (GET_RTX_CLASS (code))
3371 {
3372 case RTX_CONST_OBJ:
3373 if (code == CONST_INT)
7e0b4eae 3374 return -6;
807e902e
KZ
3375 if (code == CONST_WIDE_INT)
3376 return -6;
ec8e098d 3377 if (code == CONST_DOUBLE)
7e0b4eae 3378 return -5;
091a3ac7
CF
3379 if (code == CONST_FIXED)
3380 return -5;
7e0b4eae 3381 return -4;
ec8e098d
PB
3382
3383 case RTX_EXTRA:
3384 /* SUBREGs of objects should come second. */
3385 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
7e0b4eae 3386 return -3;
6fb5fa3c 3387 return 0;
ec8e098d
PB
3388
3389 case RTX_OBJ:
3390 /* Complex expressions should be the first, so decrease priority
7e0b4eae
PB
3391 of objects. Prefer pointer objects over non pointer objects. */
3392 if ((REG_P (op) && REG_POINTER (op))
3393 || (MEM_P (op) && MEM_POINTER (op)))
3394 return -1;
3395 return -2;
ec8e098d
PB
3396
3397 case RTX_COMM_ARITH:
3398 /* Prefer operands that are themselves commutative to be first.
3399 This helps to make things linear. In particular,
3400 (and (and (reg) (reg)) (not (reg))) is canonical. */
3401 return 4;
3402
3403 case RTX_BIN_ARITH:
3404 /* If only one operand is a binary expression, it will be the first
3405 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3406 is canonical, although it will usually be further simplified. */
3407 return 2;
b8698a0f 3408
ec8e098d
PB
3409 case RTX_UNARY:
3410 /* Then prefer NEG and NOT. */
3411 if (code == NEG || code == NOT)
3412 return 1;
e5c56fd9 3413
ec8e098d
PB
3414 default:
3415 return 0;
3416 }
e5c56fd9
JH
3417}
3418
f63d1bf7 3419/* Return 1 iff it is necessary to swap the operands of a commutative operation
e5c56fd9
JH
3420 in order to canonicalize expression. */
3421
7e0b4eae 3422bool
0c20a65f 3423swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 3424{
9b3bd424
RH
3425 return (commutative_operand_precedence (x)
3426 < commutative_operand_precedence (y));
e5c56fd9 3427}
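/* Illustrative sketch, not part of the original file: the usual
   canonicalization idiom built on swap_commutative_operands_p, as
   used when constructing commutative expressions.  The name
   canonicalize_comm_operands is hypothetical.  */

static void
canonicalize_comm_operands (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}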
2dfa9a87
MH
3428
3429/* Return 1 if X is an autoincrement side effect and the register is
3430 not the stack pointer. */
3431int
f7d504c2 3432auto_inc_p (const_rtx x)
2dfa9a87
MH
3433{
3434 switch (GET_CODE (x))
3435 {
3436 case PRE_INC:
3437 case POST_INC:
3438 case PRE_DEC:
3439 case POST_DEC:
3440 case PRE_MODIFY:
3441 case POST_MODIFY:
3442 /* There are no REG_INC notes for SP. */
3443 if (XEXP (x, 0) != stack_pointer_rtx)
3444 return 1;
3445 default:
3446 break;
3447 }
3448 return 0;
3449}
3b10cf4b 3450
f9da5064 3451/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 3452int
f7d504c2 3453loc_mentioned_in_p (rtx *loc, const_rtx in)
db7ba742 3454{
a52b023a
PB
3455 enum rtx_code code;
3456 const char *fmt;
db7ba742
R
3457 int i, j;
3458
a52b023a
PB
3459 if (!in)
3460 return 0;
3461
3462 code = GET_CODE (in);
3463 fmt = GET_RTX_FORMAT (code);
db7ba742
R
3464 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3465 {
db7ba742
R
3466 if (fmt[i] == 'e')
3467 {
e0651058 3468 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
db7ba742
R
3469 return 1;
3470 }
3471 else if (fmt[i] == 'E')
3472 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
e0651058
AO
3473 if (loc == &XVECEXP (in, i, j)
3474 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
db7ba742
R
3475 return 1;
3476 }
3477 return 0;
3478}
ddef6bc7 3479
bb51e270
RS
3480/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3481 and SUBREG_BYTE, return the bit offset where the subreg begins
3482 (counting from the least significant bit of the operand). */
33aceff2
JW
3483
3484unsigned int
ef4bddc2
RS
3485subreg_lsb_1 (machine_mode outer_mode,
3486 machine_mode inner_mode,
bb51e270 3487 unsigned int subreg_byte)
33aceff2 3488{
33aceff2
JW
3489 unsigned int bitpos;
3490 unsigned int byte;
3491 unsigned int word;
3492
3493 /* A paradoxical subreg begins at bit position 0. */
5511bc5a 3494 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
33aceff2
JW
3495 return 0;
3496
3497 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3498 /* If the subreg crosses a word boundary ensure that
3499 it also begins and ends on a word boundary. */
41374e13
NS
3500 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3501 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3502 && (subreg_byte % UNITS_PER_WORD
3503 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
33aceff2
JW
3504
3505 if (WORDS_BIG_ENDIAN)
3506 word = (GET_MODE_SIZE (inner_mode)
bb51e270 3507 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
33aceff2 3508 else
bb51e270 3509 word = subreg_byte / UNITS_PER_WORD;
33aceff2
JW
3510 bitpos = word * BITS_PER_WORD;
3511
3512 if (BYTES_BIG_ENDIAN)
3513 byte = (GET_MODE_SIZE (inner_mode)
bb51e270 3514 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
33aceff2 3515 else
bb51e270 3516 byte = subreg_byte % UNITS_PER_WORD;
33aceff2
JW
3517 bitpos += byte * BITS_PER_UNIT;
3518
3519 return bitpos;
3520}
3521
bb51e270
RS
3522/* Given a subreg X, return the bit offset where the subreg begins
3523 (counting from the least significant bit of the reg). */
3524
3525unsigned int
f7d504c2 3526subreg_lsb (const_rtx x)
bb51e270
RS
3527{
3528 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3529 SUBREG_BYTE (x));
3530}
3531
f1f4e530 3532/* Fill in information about a subreg of a hard register.
ddef6bc7
JJ
3533 xregno - A regno of an inner hard subreg_reg (or what will become one).
3534 xmode - The mode of xregno.
3535 offset - The byte offset.
3536 ymode - The mode of a top level SUBREG (or what may become one).
0cb07998
RS
3537 info - Pointer to structure to fill in.
3538
3539 Rather than considering one particular inner register (and thus one
3540 particular "outer" register) in isolation, this function really uses
3541 XREGNO as a model for a sequence of isomorphic hard registers. Thus the
3542 function does not check whether adding INFO->offset to XREGNO gives
3543 a valid hard register; even if INFO->offset + XREGNO is out of range,
3544 there might be another register of the same type that is in range.
3545 Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new
3546 register, since that can depend on things like whether the final
3547 register number is even or odd. Callers that want to check whether
3548 this particular subreg can be replaced by a simple (reg ...) should
3549 use simplify_subreg_regno. */
3550
c619e982 3551void
ef4bddc2
RS
3552subreg_get_info (unsigned int xregno, machine_mode xmode,
3553 unsigned int offset, machine_mode ymode,
f1f4e530 3554 struct subreg_info *info)
04c5580f 3555{
8521c414 3556 int nregs_xmode, nregs_ymode;
04c5580f 3557 int mode_multiple, nregs_multiple;
f1f4e530 3558 int offset_adj, y_offset, y_offset_adj;
8521c414 3559 int regsize_xmode, regsize_ymode;
f1f4e530 3560 bool rknown;
04c5580f 3561
41374e13 3562 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
04c5580f 3563
f1f4e530
JM
3564 rknown = false;
3565
dd79bb7e
GK
3566 /* If there are holes in a non-scalar mode in registers, we expect
3567 that it is made up of its units concatenated together. */
8521c414 3568 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
dd79bb7e 3569 {
ef4bddc2 3570 machine_mode xmode_unit;
8521c414
JM
3571
3572 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3573 if (GET_MODE_INNER (xmode) == VOIDmode)
3574 xmode_unit = xmode;
3575 else
3576 xmode_unit = GET_MODE_INNER (xmode);
3577 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3578 gcc_assert (nregs_xmode
3579 == (GET_MODE_NUNITS (xmode)
3580 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3581 gcc_assert (hard_regno_nregs[xregno][xmode]
3582 == (hard_regno_nregs[xregno][xmode_unit]
3583 * GET_MODE_NUNITS (xmode)));
dd79bb7e
GK
3584
3585 /* You can only ask for a SUBREG of a value with holes in the middle
3586 if you don't cross the holes. (Such a SUBREG should be done by
3587 picking a different register class, or doing it in memory if
3588 necessary.) An example of a value with holes is XCmode on 32-bit
3589 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
b8698a0f 3590 3 for each part, but in memory it's two 128-bit parts.
dd79bb7e
GK
3591 Padding is assumed to be at the end (not necessarily the 'high part')
3592 of each unit. */
b8698a0f 3593 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
8521c414
JM
3594 < GET_MODE_NUNITS (xmode))
3595 && (offset / GET_MODE_SIZE (xmode_unit)
dd79bb7e 3596 != ((offset + GET_MODE_SIZE (ymode) - 1)
8521c414 3597 / GET_MODE_SIZE (xmode_unit))))
f1f4e530
JM
3598 {
3599 info->representable_p = false;
3600 rknown = true;
3601 }
dd79bb7e
GK
3602 }
3603 else
3604 nregs_xmode = hard_regno_nregs[xregno][xmode];
b8698a0f 3605
66fd46b6 3606 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3607
dd79bb7e 3608 /* Paradoxical subregs are otherwise valid. */
f1f4e530
JM
3609 if (!rknown
3610 && offset == 0
5511bc5a 3611 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
f1f4e530
JM
3612 {
3613 info->representable_p = true;
3614 /* If this is a big endian paradoxical subreg, which uses more
3615 actual hard registers than the original register, we must
3616 return a negative offset so that we find the proper highpart
3617 of the register. */
3618 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
c0a6a1ef 3619 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
f1f4e530
JM
3620 info->offset = nregs_xmode - nregs_ymode;
3621 else
3622 info->offset = 0;
3623 info->nregs = nregs_ymode;
3624 return;
3625 }
04c5580f 3626
8521c414
JM
3627 /* If registers store different numbers of bits in the different
3628 modes, we cannot generally form this subreg. */
f1f4e530 3629 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
5f7fc2b8
JM
3630 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3631 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3632 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
f1f4e530
JM
3633 {
3634 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
f1f4e530 3635 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
f1f4e530
JM
3636 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3637 {
3638 info->representable_p = false;
3639 info->nregs
3640 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3641 info->offset = offset / regsize_xmode;
3642 return;
3643 }
3644 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3645 {
3646 info->representable_p = false;
3647 info->nregs
3648 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3649 info->offset = offset / regsize_xmode;
3650 return;
3651 }
9ab41c76
AH
3652 /* Quick exit for the simple and common case of extracting whole
3653 subregisters from a multiregister value. */
3654 /* ??? It would be better to integrate this into the code below,
3655 if we can generalize the concept enough and figure out how
3656 odd-sized modes can coexist with the other weird cases we support. */
3657 if (!rknown
3658 && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
3659 && regsize_xmode == regsize_ymode
3660 && (offset % regsize_ymode) == 0)
3661 {
3662 info->representable_p = true;
3663 info->nregs = nregs_ymode;
3664 info->offset = offset / regsize_ymode;
3665 gcc_assert (info->offset + info->nregs <= nregs_xmode);
3666 return;
3667 }
f1f4e530 3668 }
8521c414 3669
dd79bb7e 3670 /* Lowpart subregs are otherwise valid. */
f1f4e530
JM
3671 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3672 {
3673 info->representable_p = true;
3674 rknown = true;
a446b4e8
JM
3675
3676 if (offset == 0 || nregs_xmode == nregs_ymode)
3677 {
3678 info->offset = 0;
3679 info->nregs = nregs_ymode;
3680 return;
3681 }
f1f4e530 3682 }
04c5580f 3683
dd79bb7e
GK
3684 /* This should always pass, otherwise we don't know how to verify
3685 the constraint. These conditions may be relaxed but
3686 subreg_regno_offset would need to be redesigned. */
41374e13 3687 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
41374e13 3688 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
04c5580f 3689
c0a6a1ef
BS
3690 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3691 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3692 {
3693 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3694 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3695 HOST_WIDE_INT off_low = offset & (ysize - 1);
3696 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3697 offset = (xsize - ysize - off_high) | off_low;
3698 }
b20b352b 3699 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3700 values. The subreg must represent a lowpart of a given field.
04c5580f 3701 Compute what field it is. */
f1f4e530
JM
3702 offset_adj = offset;
3703 offset_adj -= subreg_lowpart_offset (ymode,
3704 mode_for_size (GET_MODE_BITSIZE (xmode)
3705 / nregs_xmode,
3706 MODE_INT, 0));
04c5580f 3707
dd79bb7e 3708 /* Size of ymode must not be greater than the size of xmode. */
04c5580f 3709 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 3710 gcc_assert (mode_multiple != 0);
04c5580f
JH
3711
3712 y_offset = offset / GET_MODE_SIZE (ymode);
f1f4e530
JM
3713 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3714 nregs_multiple = nregs_xmode / nregs_ymode;
41374e13 3715
f1f4e530 3716 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
41374e13
NS
3717 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3718
f1f4e530
JM
3719 if (!rknown)
3720 {
3721 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3722 rknown = true;
3723 }
3724 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3725 info->nregs = nregs_ymode;
3726}
3727
3728/* This function returns the regno offset of a subreg expression.
3729 xregno - A regno of an inner hard subreg_reg (or what will become one).
3730 xmode - The mode of xregno.
3731 offset - The byte offset.
3732 ymode - The mode of a top level SUBREG (or what may become one).
3733 RETURN - The regno offset which would be used. */
3734unsigned int
ef4bddc2
RS
3735subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3736 unsigned int offset, machine_mode ymode)
f1f4e530
JM
3737{
3738 struct subreg_info info;
3739 subreg_get_info (xregno, xmode, offset, ymode, &info);
3740 return info.offset;
3741}
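
/* A worked example of the mapping above (illustrative; the register
   numbers and modes are hypothetical, not from any particular target):
   on a 32-bit little-endian target where DImode occupies two SImode
   hard registers, the high half

     (subreg:SI (reg:DI 10) 4)

   yields subreg_regno_offset (10, DImode, 4, SImode) == 1, so the
   subreg resolves to hard register 11.  */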
3742
3743/* This function returns true when the offset is representable via
3744 subreg_regno_offset for the given regno.
3745 xregno - A regno of an inner hard subreg_reg (or what will become one).
3746 xmode - The mode of xregno.
3747 offset - The byte offset.
3748 ymode - The mode of a top level SUBREG (or what may become one).
3749 RETURN - Whether the offset is representable. */
3750bool
ef4bddc2
RS
3751subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3752 unsigned int offset, machine_mode ymode)
f1f4e530
JM
3753{
3754 struct subreg_info info;
3755 subreg_get_info (xregno, xmode, offset, ymode, &info);
05cee290 3756 return info.representable_p;
04c5580f
JH
3757}
3758
eef302d2
RS
3759/* Return the number of a YMODE register to which
3760
3761 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3762
3763 can be simplified. Return -1 if the subreg can't be simplified.
3764
3765 XREGNO is a hard register number. */
3766
3767int
ef4bddc2
RS
3768simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3769 unsigned int offset, machine_mode ymode)
eef302d2
RS
3770{
3771 struct subreg_info info;
3772 unsigned int yregno;
3773
3774#ifdef CANNOT_CHANGE_MODE_CLASS
3775 /* Give the backend a chance to disallow the mode change. */
3776 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3777 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
55a2c322
VM
3778 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3779 /* We can use mode change in LRA for some transformations. */
3780 && ! lra_in_progress)
eef302d2
RS
3781 return -1;
3782#endif
3783
3784 /* We shouldn't simplify stack-related registers. */
3785 if ((!reload_completed || frame_pointer_needed)
d4e0d036 3786 && xregno == FRAME_POINTER_REGNUM)
eef302d2
RS
3787 return -1;
3788
3789 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
98072ee5 3790 && xregno == ARG_POINTER_REGNUM)
eef302d2
RS
3791 return -1;
3792
55a2c322
VM
3793 if (xregno == STACK_POINTER_REGNUM
3794 /* We should convert the hard stack register in LRA when it is
3795 possible. */
3796 && ! lra_in_progress)
eef302d2
RS
3797 return -1;
3798
3799 /* Try to get the register offset. */
3800 subreg_get_info (xregno, xmode, offset, ymode, &info);
3801 if (!info.representable_p)
3802 return -1;
3803
3804 /* Make sure that the offsetted register value is in range. */
3805 yregno = xregno + info.offset;
3806 if (!HARD_REGISTER_NUM_P (yregno))
3807 return -1;
3808
3809 /* See whether (reg:YMODE YREGNO) is valid.
3810
3811 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
eb93b31f
EB
3812 This is a kludge to work around how complex FP arguments are passed
3813 on IA-64 and should be fixed. See PR target/49226. */
eef302d2
RS
3814 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3815 && HARD_REGNO_MODE_OK (xregno, xmode))
3816 return -1;
3817
3818 return (int) yregno;
3819}
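
/* A minimal usage sketch (not taken from the sources): callers
   typically use this to fold a hard-register SUBREG into a plain REG.

     int yregno = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
                                         GET_MODE (SUBREG_REG (x)),
                                         SUBREG_BYTE (x), GET_MODE (x));
     if (yregno >= 0)
       newx = gen_rtx_REG (GET_MODE (x), yregno);

   where X is assumed to be a SUBREG of a hard register and NEWX a
   caller-provided rtx variable.  */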
3820
dc297297 3821/* Return the final regno that a subreg expression refers to. */
a6a2274a 3822unsigned int
f7d504c2 3823subreg_regno (const_rtx x)
ddef6bc7
JJ
3824{
3825 unsigned int ret;
3826 rtx subreg = SUBREG_REG (x);
3827 int regno = REGNO (subreg);
3828
a6a2274a
KH
3829 ret = regno + subreg_regno_offset (regno,
3830 GET_MODE (subreg),
ddef6bc7
JJ
3831 SUBREG_BYTE (x),
3832 GET_MODE (x));
3833 return ret;
3834
3835}
f1f4e530
JM
3836
3837/* Return the number of registers that a subreg expression refers
3838 to. */
3839unsigned int
f7d504c2 3840subreg_nregs (const_rtx x)
ba49cb7b
KZ
3841{
3842 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3843}
3844
3845/* Return the number of registers that a subreg REG with REGNO
3846 expression refers to. This is a copy of rtlanal.c:subreg_nregs,
3847 changed so that the regno can be passed in. */
3848
3849unsigned int
3850subreg_nregs_with_regno (unsigned int regno, const_rtx x)
f1f4e530
JM
3851{
3852 struct subreg_info info;
3853 rtx subreg = SUBREG_REG (x);
f1f4e530
JM
3854
3855 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3856 &info);
3857 return info.nregs;
3858}
3859
ba49cb7b 3860
833366d6
JH
3861struct parms_set_data
3862{
3863 int nregs;
3864 HARD_REG_SET regs;
3865};
3866
3867/* Helper function for noticing stores to parameter registers. */
3868static void
7bc980e1 3869parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
833366d6 3870{
1634b18f 3871 struct parms_set_data *const d = (struct parms_set_data *) data;
833366d6
JH
3872 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3873 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3874 {
3875 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3876 d->nregs--;
3877 }
3878}
3879
a6a2274a 3880/* Look backward for the first parameter to be loaded.
b2df20b4
DJ
3881 Note that loads of all parameters will not necessarily be
3882 found if CSE has eliminated some of them (e.g., an argument
3883 to the outer function is passed down as a parameter).
833366d6 3884 Do not skip BOUNDARY. */
62fc98cc 3885rtx_insn *
9321cf00 3886find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
833366d6
JH
3887{
3888 struct parms_set_data parm;
9321cf00
DM
3889 rtx p;
3890 rtx_insn *before, *first_set;
833366d6
JH
3891
3892 /* Since different machines initialize their parameter registers
3893 in different orders, assume nothing. Collect the set of all
3894 parameter registers. */
3895 CLEAR_HARD_REG_SET (parm.regs);
3896 parm.nregs = 0;
3897 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3898 if (GET_CODE (XEXP (p, 0)) == USE
f8cfc6aa 3899 && REG_P (XEXP (XEXP (p, 0), 0)))
833366d6 3900 {
41374e13 3901 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
833366d6
JH
3902
3903 /* We only care about registers which can hold function
3904 arguments. */
3905 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3906 continue;
3907
3908 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3909 parm.nregs++;
3910 }
3911 before = call_insn;
b2df20b4 3912 first_set = call_insn;
833366d6
JH
3913
3914 /* Search backward for the first set of a register in this set. */
3915 while (parm.nregs && before != boundary)
3916 {
3917 before = PREV_INSN (before);
3918
3919 /* It is possible that some loads got CSEed from one call to
3920 another. Stop in that case. */
4b4bf941 3921 if (CALL_P (before))
833366d6
JH
3922 break;
3923
dbc1a163 3924 /* Our caller must either ensure that we will find all sets
833366d6 3925 (in case code has not been optimized yet), or take care of
eaec9b3d 3926 possible labels by setting BOUNDARY to the preceding
833366d6 3927 CODE_LABEL. */
4b4bf941 3928 if (LABEL_P (before))
dbc1a163 3929 {
41374e13 3930 gcc_assert (before == boundary);
dbc1a163
RH
3931 break;
3932 }
833366d6 3933
0d025d43 3934 if (INSN_P (before))
b2df20b4
DJ
3935 {
3936 int nregs_old = parm.nregs;
3937 note_stores (PATTERN (before), parms_set, &parm);
3938 /* If we found something that did not set a parameter reg,
3939 we're done. Do not keep going, as that might result
3940 in hoisting an insn before the setting of a pseudo
3941 that is used by the hoisted insn. */
3942 if (nregs_old != parm.nregs)
3943 first_set = before;
3944 else
3945 break;
3946 }
833366d6 3947 }
9321cf00 3948 return first_set;
833366d6 3949}
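
/* Usage sketch (illustrative only): a pass that must not separate a
   call from its argument setup can find the boundary with

     rtx_insn *limit = find_first_parameter_load (call_insn, bb_head);

   and then avoid moving instructions into the region between LIMIT and
   CALL_INSN; BB_HEAD here stands for a hypothetical boundary label at
   the start of the block.  */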
3dec4024 3950
14b493d6 3951/* Return true if we should avoid inserting code between INSN and the preceding
3dec4024
JH
3952 call instruction. */
3953
3954bool
e4685bc8 3955keep_with_call_p (const rtx_insn *insn)
3dec4024
JH
3956{
3957 rtx set;
3958
3959 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3960 {
f8cfc6aa 3961 if (REG_P (SET_DEST (set))
5df533b3 3962 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3dec4024
JH
3963 && fixed_regs[REGNO (SET_DEST (set))]
3964 && general_operand (SET_SRC (set), VOIDmode))
3965 return true;
f8cfc6aa 3966 if (REG_P (SET_SRC (set))
82f81f18 3967 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
f8cfc6aa 3968 && REG_P (SET_DEST (set))
3dec4024
JH
3969 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3970 return true;
bc204393
RH
3971 /* There may be a stack pop just after the call and before the store
3972 of the return register. Search for the actual store when deciding
3973 if we can break or not. */
3dec4024
JH
3974 if (SET_DEST (set) == stack_pointer_rtx)
3975 {
75547801 3976 /* This CONST_CAST is okay because next_nonnote_insn just
4e9b57fa 3977 returns its argument and we assign it to a const_rtx
75547801 3978 variable. */
e4685bc8
TS
3979 const rtx_insn *i2
3980 = next_nonnote_insn (const_cast<rtx_insn *> (insn));
bc204393 3981 if (i2 && keep_with_call_p (i2))
3dec4024
JH
3982 return true;
3983 }
3984 }
3985 return false;
3986}
71d2c5bd 3987
432f982f
JH
3988/* Return true if LABEL is a target of JUMP_INSN. This applies only
3989 to non-complex jumps. That is, direct unconditional, conditional,
3990 and tablejumps, but not computed jumps or returns. It also does
3991 not apply to the fallthru case of a conditional jump. */
3992
3993bool
c5241a21 3994label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
432f982f
JH
3995{
3996 rtx tmp = JUMP_LABEL (jump_insn);
8942ee0f 3997 rtx_jump_table_data *table;
432f982f
JH
3998
3999 if (label == tmp)
4000 return true;
4001
8942ee0f 4002 if (tablejump_p (jump_insn, NULL, &table))
432f982f 4003 {
95c43227 4004 rtvec vec = table->get_labels ();
432f982f
JH
4005 int i, veclen = GET_NUM_ELEM (vec);
4006
4007 for (i = 0; i < veclen; ++i)
4008 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
4009 return true;
4010 }
4011
cb2f563b
HPN
4012 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
4013 return true;
4014
432f982f
JH
4015 return false;
4016}
4017
f894b69b
PB
4018\f
4019/* Return an estimate of the cost of computing rtx X.
4020 One use is in cse, to decide which expression to keep in the hash table.
4021 Another is in rtl generation, to pick the cheapest way to multiply.
b8698a0f 4022 Other uses like the latter are expected in the future.
f40751dd 4023
68f932c4
RS
4024 X appears as operand OPNO in an expression with code OUTER_CODE.
4025 SPEED specifies whether costs optimized for speed or size should
f40751dd 4026 be returned. */
f894b69b
PB
4027
4028int
e548c9df
AM
4029rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
4030 int opno, bool speed)
f894b69b
PB
4031{
4032 int i, j;
4033 enum rtx_code code;
4034 const char *fmt;
4035 int total;
e098c169 4036 int factor;
f894b69b
PB
4037
4038 if (x == 0)
4039 return 0;
4040
e548c9df
AM
4041 if (GET_MODE (x) != VOIDmode)
4042 mode = GET_MODE (x);
4043
e098c169
HPN
4044 /* A size N times larger than UNITS_PER_WORD likely needs N times as
4045 many insns, taking N times as long. */
e548c9df 4046 factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
e098c169
HPN
4047 if (factor == 0)
4048 factor = 1;
4049
f894b69b
PB
4050 /* Compute the default costs of certain things.
4051 Note that targetm.rtx_costs can override the defaults. */
4052
4053 code = GET_CODE (x);
4054 switch (code)
4055 {
4056 case MULT:
e098c169
HPN
4057 /* Multiplication has time-complexity O(N*N), where N is the
4058 number of units (translated from digits) when using
4059 schoolbook long multiplication. */
4060 total = factor * factor * COSTS_N_INSNS (5);
f894b69b
PB
4061 break;
4062 case DIV:
4063 case UDIV:
4064 case MOD:
4065 case UMOD:
e098c169
HPN
4066 /* Similarly, complexity for schoolbook long division. */
4067 total = factor * factor * COSTS_N_INSNS (7);
f894b69b
PB
4068 break;
4069 case USE:
db3edc20 4070 /* Used in combine.c as a marker. */
f894b69b
PB
4071 total = 0;
4072 break;
e098c169
HPN
4073 case SET:
4074 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
4075 the mode for the factor. */
e548c9df
AM
4076 mode = GET_MODE (SET_DEST (x));
4077 factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
e098c169
HPN
4078 if (factor == 0)
4079 factor = 1;
4080 /* Fall through. */
f894b69b 4081 default:
e098c169 4082 total = factor * COSTS_N_INSNS (1);
f894b69b
PB
4083 }
4084
4085 switch (code)
4086 {
4087 case REG:
4088 return 0;
4089
4090 case SUBREG:
edb81165 4091 total = 0;
f894b69b
PB
4092 /* If we can't tie these modes, make this expensive. The larger
4093 the mode, the more expensive it is. */
e548c9df 4094 if (! MODES_TIEABLE_P (mode, GET_MODE (SUBREG_REG (x))))
e098c169 4095 return COSTS_N_INSNS (2 + factor);
f894b69b
PB
4096 break;
4097
4098 default:
e548c9df 4099 if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
f894b69b
PB
4100 return total;
4101 break;
4102 }
4103
4104 /* Sum the costs of the sub-rtx's, plus cost of this operation,
4105 which is already in total. */
4106
4107 fmt = GET_RTX_FORMAT (code);
4108 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4109 if (fmt[i] == 'e')
e548c9df 4110 total += rtx_cost (XEXP (x, i), mode, code, i, speed);
f894b69b
PB
4111 else if (fmt[i] == 'E')
4112 for (j = 0; j < XVECLEN (x, i); j++)
e548c9df 4113 total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);
f894b69b
PB
4114
4115 return total;
4116}
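
/* Example (a sketch; pseudo register 100 and the constants are made up):
   expansion code can compare a multiply against a shift on the current
   target and keep the cheaper form.

     rtx reg = gen_rtx_REG (SImode, 100);
     int mul_cost = rtx_cost (gen_rtx_MULT (SImode, reg, GEN_INT (4)),
                              SImode, SET, 1, true);
     int ash_cost = rtx_cost (gen_rtx_ASHIFT (SImode, reg, GEN_INT (2)),
                              SImode, SET, 1, true);

   On most targets ASH_COST will be the smaller of the two.  */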
22939744
BS
4117
4118/* Fill in the structure C with information about both speed and size rtx
68f932c4 4119 costs for X, which is operand OPNO in an expression with code OUTER. */
22939744
BS
4120
4121void
e548c9df 4122get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
68f932c4 4123 struct full_rtx_costs *c)
22939744 4124{
e548c9df
AM
4125 c->speed = rtx_cost (x, mode, outer, opno, true);
4126 c->size = rtx_cost (x, mode, outer, opno, false);
22939744
BS
4127}
4128
f894b69b
PB
4129\f
4130/* Return cost of address expression X.
b8698a0f 4131 Expect that X is a properly formed address reference.
f40751dd
JH
4132
4133 The SPEED parameter specifies whether costs optimized for speed or size should
4134 be returned. */
f894b69b
PB
4135
4136int
ef4bddc2 4137address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
f894b69b 4138{
f894b69b
PB
4139 /* We may be asked for cost of various unusual addresses, such as operands
4140 of push instruction. It is not worthwhile to complicate writing
4141 of the target hook by such cases. */
4142
09e881c9 4143 if (!memory_address_addr_space_p (mode, x, as))
f894b69b
PB
4144 return 1000;
4145
b413068c 4146 return targetm.address_cost (x, mode, as, speed);
f894b69b
PB
4147}
4148
4149/* If the target doesn't override, compute the cost as with arithmetic. */
4150
4151int
ef4bddc2 4152default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
f894b69b 4153{
e548c9df 4154 return rtx_cost (x, Pmode, MEM, 0, speed);
f894b69b 4155}
2f93eea8
PB
4156\f
4157
4158unsigned HOST_WIDE_INT
ef4bddc2 4159nonzero_bits (const_rtx x, machine_mode mode)
2f93eea8
PB
4160{
4161 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
4162}
4163
4164unsigned int
ef4bddc2 4165num_sign_bit_copies (const_rtx x, machine_mode mode)
2f93eea8
PB
4166{
4167 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
4168}
4169
4170/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
4171 It avoids exponential behavior in nonzero_bits1 when X has
4172 identical subexpressions on the first or the second level. */
4173
4174static unsigned HOST_WIDE_INT
ef4bddc2
RS
4175cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
4176 machine_mode known_mode,
2f93eea8
PB
4177 unsigned HOST_WIDE_INT known_ret)
4178{
4179 if (x == known_x && mode == known_mode)
4180 return known_ret;
4181
4182 /* Try to find identical subexpressions. If found call
4183 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
4184 precomputed value for the subexpression as KNOWN_RET. */
4185
4186 if (ARITHMETIC_P (x))
4187 {
4188 rtx x0 = XEXP (x, 0);
4189 rtx x1 = XEXP (x, 1);
4190
4191 /* Check the first level. */
4192 if (x0 == x1)
4193 return nonzero_bits1 (x, mode, x0, mode,
4194 cached_nonzero_bits (x0, mode, known_x,
4195 known_mode, known_ret));
4196
4197 /* Check the second level. */
4198 if (ARITHMETIC_P (x0)
4199 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4200 return nonzero_bits1 (x, mode, x1, mode,
4201 cached_nonzero_bits (x1, mode, known_x,
4202 known_mode, known_ret));
4203
4204 if (ARITHMETIC_P (x1)
4205 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4206 return nonzero_bits1 (x, mode, x0, mode,
4207 cached_nonzero_bits (x0, mode, known_x,
4208 known_mode, known_ret));
4209 }
4210
4211 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
4212}
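
/* For instance, if X is (plus (reg A) (reg A)) with one shared rtx for
   both operands, the first-level check computes the nonzero bits of
   the shared operand once and hands the result down as KNOWN_RET
   rather than recursing into it from both sides; the second-level
   checks catch sharing one level deeper.  */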
4213
4214/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
4215 We don't let nonzero_bits recur into num_sign_bit_copies, because that
4216 is less useful. We can't allow both, because that results in exponential
4217 run time recursion. There is a nullstone testcase that triggered
4218 this. This macro avoids accidental uses of num_sign_bit_copies. */
4219#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4220
4221/* Given an expression, X, compute which bits in X can be nonzero.
4222 We don't care about bits outside of those defined in MODE.
4223
4224 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
4225 an arithmetic operation, we can do better. */
4226
4227static unsigned HOST_WIDE_INT
ef4bddc2
RS
4228nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
4229 machine_mode known_mode,
2f93eea8
PB
4230 unsigned HOST_WIDE_INT known_ret)
4231{
4232 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4233 unsigned HOST_WIDE_INT inner_nz;
4234 enum rtx_code code;
ef4bddc2 4235 machine_mode inner_mode;
5511bc5a 4236 unsigned int mode_width = GET_MODE_PRECISION (mode);
2f93eea8 4237
ff596cd2
RL
4238 /* For floating-point and vector values, assume all bits are needed. */
4239 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
4240 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
2f93eea8
PB
4241 return nonzero;
4242
4243 /* If X is wider than MODE, use its mode instead. */
5511bc5a 4244 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
2f93eea8
PB
4245 {
4246 mode = GET_MODE (x);
4247 nonzero = GET_MODE_MASK (mode);
5511bc5a 4248 mode_width = GET_MODE_PRECISION (mode);
2f93eea8
PB
4249 }
4250
4251 if (mode_width > HOST_BITS_PER_WIDE_INT)
4252 /* Our only callers in this case look for single-bit values. So
4253 just return the mode mask. Those tests will then be false. */
4254 return nonzero;
4255
4256#ifndef WORD_REGISTER_OPERATIONS
4257 /* If MODE is wider than X, but both are a single word for both the host
4258 and target machines, we can compute this from which bits of the
4259 object might be nonzero in its own mode, taking into account the fact
4260 that on many CISC machines, accessing an object in a wider mode
4261 causes the high-order bits to become undefined. So they are
4262 not known to be zero. */
4263
4264 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
5511bc5a
BS
4265 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4266 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4267 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4268 {
4269 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4270 known_x, known_mode, known_ret);
4271 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4272 return nonzero;
4273 }
4274#endif
4275
4276 code = GET_CODE (x);
4277 switch (code)
4278 {
4279 case REG:
4280#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4281 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4282 all the bits above ptr_mode are known to be zero. */
5932a4d4 4283 /* As we do not know which address space the pointer is referring to,
d4ebfa65
BE
4284 we can do this only if the target does not support different pointer
4285 or address modes depending on the address space. */
4286 if (target_default_pointer_address_modes_p ()
4287 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
2f93eea8
PB
4288 && REG_POINTER (x))
4289 nonzero &= GET_MODE_MASK (ptr_mode);
4290#endif
4291
4292 /* Include declared information about alignment of pointers. */
4293 /* ??? We don't properly preserve REG_POINTER changes across
4294 pointer-to-integer casts, so we can't trust it except for
4295 things that we know must be pointers. See execute/960116-1.c. */
4296 if ((x == stack_pointer_rtx
4297 || x == frame_pointer_rtx
4298 || x == arg_pointer_rtx)
4299 && REGNO_POINTER_ALIGN (REGNO (x)))
4300 {
4301 unsigned HOST_WIDE_INT alignment
4302 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4303
4304#ifdef PUSH_ROUNDING
4305 /* If PUSH_ROUNDING is defined, it is possible for the
4306 stack to be momentarily aligned only to that amount,
4307 so we pick the least alignment. */
4308 if (x == stack_pointer_rtx && PUSH_ARGS)
4309 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4310 alignment);
4311#endif
4312
4313 nonzero &= ~(alignment - 1);
4314 }
4315
4316 {
4317 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
55d796da 4318 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
2f93eea8
PB
4319 known_mode, known_ret,
4320 &nonzero_for_hook);
4321
55d796da
KG
4322 if (new_rtx)
4323 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
2f93eea8
PB
4324 known_mode, known_ret);
4325
4326 return nonzero_for_hook;
4327 }
4328
4329 case CONST_INT:
4330#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4331 /* If X is negative in MODE, sign-extend the value. */
c04fc4f0
EB
4332 if (INTVAL (x) > 0
4333 && mode_width < BITS_PER_WORD
4334 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4335 != 0)
0cadbfaa 4336 return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
2f93eea8
PB
4337#endif
4338
c04fc4f0 4339 return UINTVAL (x);
2f93eea8
PB
4340
4341 case MEM:
4342#ifdef LOAD_EXTEND_OP
4343 /* In many, if not most, RISC machines, reading a byte from memory
4344 zeros the rest of the register. Noticing that fact saves a lot
4345 of extra zero-extends. */
4346 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4347 nonzero &= GET_MODE_MASK (GET_MODE (x));
4348#endif
4349 break;
4350
4351 case EQ: case NE:
4352 case UNEQ: case LTGT:
4353 case GT: case GTU: case UNGT:
4354 case LT: case LTU: case UNLT:
4355 case GE: case GEU: case UNGE:
4356 case LE: case LEU: case UNLE:
4357 case UNORDERED: case ORDERED:
2f93eea8
PB
4358 /* If this produces an integer result, we know which bits are set.
4359 Code here used to clear bits outside the mode of X, but that is
4360 now done above. */
b8698a0f
L
4361 /* Mind that MODE is the mode the caller wants to look at this
4362 operation in, and not the actual operation mode. We can wind
505ac507
RH
4363 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4364 that describes the results of a vector compare. */
4365 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2f93eea8
PB
4366 && mode_width <= HOST_BITS_PER_WIDE_INT)
4367 nonzero = STORE_FLAG_VALUE;
4368 break;
4369
4370 case NEG:
4371#if 0
4372 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4373 and num_sign_bit_copies. */
4374 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4375 == GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4376 nonzero = 1;
4377#endif
4378
86cdf393 4379 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
2f93eea8
PB
4380 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4381 break;
4382
4383 case ABS:
4384#if 0
4385 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4386 and num_sign_bit_copies. */
4387 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4388 == GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4389 nonzero = 1;
4390#endif
4391 break;
4392
4393 case TRUNCATE:
4394 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4395 known_x, known_mode, known_ret)
4396 & GET_MODE_MASK (mode));
4397 break;
4398
4399 case ZERO_EXTEND:
4400 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4401 known_x, known_mode, known_ret);
4402 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4403 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4404 break;
4405
4406 case SIGN_EXTEND:
4407 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4408 Otherwise, show all the bits in the outer mode but not the inner
4409 may be nonzero. */
4410 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4411 known_x, known_mode, known_ret);
4412 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4413 {
4414 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
2d0c270f 4415 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
2f93eea8
PB
4416 inner_nz |= (GET_MODE_MASK (mode)
4417 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4418 }
4419
4420 nonzero &= inner_nz;
4421 break;
4422
4423 case AND:
4424 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4425 known_x, known_mode, known_ret)
4426 & cached_nonzero_bits (XEXP (x, 1), mode,
4427 known_x, known_mode, known_ret);
4428 break;
4429
4430 case XOR: case IOR:
4431 case UMIN: case UMAX: case SMIN: case SMAX:
4432 {
c04fc4f0
EB
4433 unsigned HOST_WIDE_INT nonzero0
4434 = cached_nonzero_bits (XEXP (x, 0), mode,
4435 known_x, known_mode, known_ret);
2f93eea8
PB
4436
4437 /* Don't call nonzero_bits for the second time if it cannot change
4438 anything. */
4439 if ((nonzero & nonzero0) != nonzero)
4440 nonzero &= nonzero0
4441 | cached_nonzero_bits (XEXP (x, 1), mode,
4442 known_x, known_mode, known_ret);
4443 }
4444 break;
4445
4446 case PLUS: case MINUS:
4447 case MULT:
4448 case DIV: case UDIV:
4449 case MOD: case UMOD:
4450 /* We can apply the rules of arithmetic to compute the number of
4451 high- and low-order zero bits of these operations. We start by
4452 computing the width (position of the highest-order nonzero bit)
4453 and the number of low-order zero bits for each value. */
4454 {
c04fc4f0
EB
4455 unsigned HOST_WIDE_INT nz0
4456 = cached_nonzero_bits (XEXP (x, 0), mode,
4457 known_x, known_mode, known_ret);
4458 unsigned HOST_WIDE_INT nz1
4459 = cached_nonzero_bits (XEXP (x, 1), mode,
4460 known_x, known_mode, known_ret);
5511bc5a 4461 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
2f93eea8
PB
4462 int width0 = floor_log2 (nz0) + 1;
4463 int width1 = floor_log2 (nz1) + 1;
4464 int low0 = floor_log2 (nz0 & -nz0);
4465 int low1 = floor_log2 (nz1 & -nz1);
c04fc4f0
EB
4466 unsigned HOST_WIDE_INT op0_maybe_minusp
4467 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4468 unsigned HOST_WIDE_INT op1_maybe_minusp
4469 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
2f93eea8
PB
4470 unsigned int result_width = mode_width;
4471 int result_low = 0;
4472
4473 switch (code)
4474 {
4475 case PLUS:
4476 result_width = MAX (width0, width1) + 1;
4477 result_low = MIN (low0, low1);
4478 break;
4479 case MINUS:
4480 result_low = MIN (low0, low1);
4481 break;
4482 case MULT:
4483 result_width = width0 + width1;
4484 result_low = low0 + low1;
4485 break;
4486 case DIV:
4487 if (width1 == 0)
4488 break;
c04fc4f0 4489 if (!op0_maybe_minusp && !op1_maybe_minusp)
2f93eea8
PB
4490 result_width = width0;
4491 break;
4492 case UDIV:
4493 if (width1 == 0)
4494 break;
4495 result_width = width0;
4496 break;
4497 case MOD:
4498 if (width1 == 0)
4499 break;
c04fc4f0 4500 if (!op0_maybe_minusp && !op1_maybe_minusp)
2f93eea8
PB
4501 result_width = MIN (width0, width1);
4502 result_low = MIN (low0, low1);
4503 break;
4504 case UMOD:
4505 if (width1 == 0)
4506 break;
4507 result_width = MIN (width0, width1);
4508 result_low = MIN (low0, low1);
4509 break;
4510 default:
41374e13 4511 gcc_unreachable ();
2f93eea8
PB
4512 }
4513
4514 if (result_width < mode_width)
c04fc4f0 4515 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
2f93eea8
PB
4516
4517 if (result_low > 0)
c04fc4f0 4518 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
2f93eea8
PB
4519 }
4520 break;
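
 /* A worked instance of the width arithmetic above: if
    nz0 == 0xf0 (width0 = 8, low0 = 4) and nz1 == 0x0c
    (width1 = 4, low1 = 2), then for PLUS result_width is
    MAX (8, 4) + 1 == 9 and result_low is MIN (4, 2) == 2,
    so NONZERO is masked down to 0x1fc.  */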
4521
4522 case ZERO_EXTRACT:
481683e1 4523 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4524 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
c04fc4f0 4525 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
2f93eea8
PB
4526 break;
4527
4528 case SUBREG:
4529 /* If this is a SUBREG formed for a promoted variable that has
4530 been zero-extended, we know that at least the high-order bits
4531 are zero, though others might be too. */
4532
362d42dc 4533 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
2f93eea8
PB
4534 nonzero = GET_MODE_MASK (GET_MODE (x))
4535 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4536 known_x, known_mode, known_ret);
4537
2d0c270f 4538 inner_mode = GET_MODE (SUBREG_REG (x));
2f93eea8
PB
4539 /* If the inner mode is a single word for both the host and target
4540 machines, we can compute this from which bits of the inner
4541 object might be nonzero. */
5511bc5a
BS
4542 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4543 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
2f93eea8
PB
4544 {
4545 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4546 known_x, known_mode, known_ret);
4547
4548#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4549 /* If this is a typical RISC machine, we only have to worry
4550 about the way loads are extended. */
2d0c270f
BS
4551 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4552 ? val_signbit_known_set_p (inner_mode, nonzero)
4553 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
3c0cb5de 4554 || !MEM_P (SUBREG_REG (x)))
2f93eea8
PB
4555#endif
4556 {
4557 /* On many CISC machines, accessing an object in a wider mode
4558 causes the high-order bits to become undefined. So they are
4559 not known to be zero. */
5511bc5a
BS
4560 if (GET_MODE_PRECISION (GET_MODE (x))
4561 > GET_MODE_PRECISION (inner_mode))
2f93eea8 4562 nonzero |= (GET_MODE_MASK (GET_MODE (x))
2d0c270f 4563 & ~GET_MODE_MASK (inner_mode));
2f93eea8
PB
4564 }
4565 }
4566 break;
4567
4568 case ASHIFTRT:
4569 case LSHIFTRT:
4570 case ASHIFT:
4571 case ROTATE:
4572 /* The nonzero bits are in two classes: any bits within MODE
4573 that aren't in GET_MODE (x) are always significant. The rest of the
4574 nonzero bits are those that are significant in the operand of
4575 the shift when shifted the appropriate number of bits. This
4576 shows that high-order bits are cleared by the right shift and
4577 low-order bits by left shifts. */
481683e1 4578 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4579 && INTVAL (XEXP (x, 1)) >= 0
39b2ac74 4580 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5511bc5a 4581 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8 4582 {
ef4bddc2 4583 machine_mode inner_mode = GET_MODE (x);
5511bc5a 4584 unsigned int width = GET_MODE_PRECISION (inner_mode);
2f93eea8
PB
4585 int count = INTVAL (XEXP (x, 1));
4586 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
c04fc4f0
EB
4587 unsigned HOST_WIDE_INT op_nonzero
4588 = cached_nonzero_bits (XEXP (x, 0), mode,
4589 known_x, known_mode, known_ret);
2f93eea8
PB
4590 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4591 unsigned HOST_WIDE_INT outer = 0;
4592
4593 if (mode_width > width)
4594 outer = (op_nonzero & nonzero & ~mode_mask);
4595
4596 if (code == LSHIFTRT)
4597 inner >>= count;
4598 else if (code == ASHIFTRT)
4599 {
4600 inner >>= count;
4601
4602 /* If the sign bit may have been nonzero before the shift, we
4603 need to mark all the places it could have been copied to
4604 by the shift as possibly nonzero. */
c04fc4f0
EB
4605 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4606 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4607 << (width - count);
2f93eea8
PB
4608 }
4609 else if (code == ASHIFT)
4610 inner <<= count;
4611 else
4612 inner = ((inner << (count % width)
4613 | (inner >> (width - (count % width)))) & mode_mask);
4614
4615 nonzero &= (outer | inner);
4616 }
4617 break;
4618
4619 case FFS:
4620 case POPCOUNT:
4621 /* This is at most the number of bits in the mode. */
c04fc4f0 4622 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4623 break;
4624
4625 case CLZ:
4626 /* If CLZ has a known value at zero, then the nonzero bits are
4627 that value, plus the number of bits in the mode minus one. */
4628 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
c04fc4f0
EB
4629 nonzero
4630 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4631 else
4632 nonzero = -1;
4633 break;
4634
4635 case CTZ:
4636 /* If CTZ has a known value at zero, then the nonzero bits are
4637 that value, plus the number of bits in the mode minus one. */
4638 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
c04fc4f0
EB
4639 nonzero
4640 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4641 else
4642 nonzero = -1;
4643 break;
4644
8840ae2b
JJ
4645 case CLRSB:
4646 /* This is at most the number of bits in the mode minus 1. */
4647 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4648 break;
4649
2f93eea8
PB
4650 case PARITY:
4651 nonzero = 1;
4652 break;
4653
4654 case IF_THEN_ELSE:
4655 {
c04fc4f0
EB
4656 unsigned HOST_WIDE_INT nonzero_true
4657 = cached_nonzero_bits (XEXP (x, 1), mode,
4658 known_x, known_mode, known_ret);
2f93eea8
PB
4659
4660 /* Don't call nonzero_bits for the second time if it cannot change
4661 anything. */
4662 if ((nonzero & nonzero_true) != nonzero)
4663 nonzero &= nonzero_true
4664 | cached_nonzero_bits (XEXP (x, 2), mode,
4665 known_x, known_mode, known_ret);
4666 }
4667 break;
4668
4669 default:
4670 break;
4671 }
4672
4673 return nonzero;
4674}
4675
4676/* See the macro definition above. */
4677#undef cached_num_sign_bit_copies
4678
4679\f
4680/* The function cached_num_sign_bit_copies is a wrapper around
4681 num_sign_bit_copies1. It avoids exponential behavior in
4682 num_sign_bit_copies1 when X has identical subexpressions on the
4683 first or the second level. */
4684
4685static unsigned int
ef4bddc2
RS
4686cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
4687 machine_mode known_mode,
2f93eea8
PB
4688 unsigned int known_ret)
4689{
4690 if (x == known_x && mode == known_mode)
4691 return known_ret;
4692
4693 /* Try to find identical subexpressions. If found call
4694 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4695 the precomputed value for the subexpression as KNOWN_RET. */
4696
4697 if (ARITHMETIC_P (x))
4698 {
4699 rtx x0 = XEXP (x, 0);
4700 rtx x1 = XEXP (x, 1);
4701
4702 /* Check the first level. */
4703 if (x0 == x1)
4704 return
4705 num_sign_bit_copies1 (x, mode, x0, mode,
4706 cached_num_sign_bit_copies (x0, mode, known_x,
4707 known_mode,
4708 known_ret));
4709
4710 /* Check the second level. */
4711 if (ARITHMETIC_P (x0)
4712 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4713 return
4714 num_sign_bit_copies1 (x, mode, x1, mode,
4715 cached_num_sign_bit_copies (x1, mode, known_x,
4716 known_mode,
4717 known_ret));
4718
4719 if (ARITHMETIC_P (x1)
4720 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4721 return
4722 num_sign_bit_copies1 (x, mode, x0, mode,
4723 cached_num_sign_bit_copies (x0, mode, known_x,
4724 known_mode,
4725 known_ret));
4726 }
4727
4728 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4729}
4730
4731/* Return the number of bits at the high-order end of X that are known to
4732 be equal to the sign bit. X will be used in mode MODE; if MODE is
4733 VOIDmode, X will be used in its own mode. The returned value will always
4734 be between 1 and the number of bits in MODE. */
4735
4736static unsigned int
ef4bddc2
RS
4737num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
4738 machine_mode known_mode,
2f93eea8
PB
4739 unsigned int known_ret)
4740{
4741 enum rtx_code code = GET_CODE (x);
5511bc5a 4742 unsigned int bitwidth = GET_MODE_PRECISION (mode);
2f93eea8
PB
4743 int num0, num1, result;
4744 unsigned HOST_WIDE_INT nonzero;
4745
4746 /* If we weren't given a mode, use the mode of X. If the mode is still
4747 VOIDmode, we don't know anything. Likewise if one of the modes is
4748 floating-point. */
4749
4750 if (mode == VOIDmode)
4751 mode = GET_MODE (x);
4752
ff596cd2
RL
4753 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4754 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
2f93eea8
PB
4755 return 1;
4756
4757 /* For a smaller object, just ignore the high bits. */
5511bc5a 4758 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4759 {
4760 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4761 known_x, known_mode, known_ret);
4762 return MAX (1,
5511bc5a 4763 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
2f93eea8
PB
4764 }
4765
5511bc5a 4766 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4767 {
4768#ifndef WORD_REGISTER_OPERATIONS
5511bc5a
BS
4769 /* If this machine does not do all register operations on the entire
4770 register and MODE is wider than the mode of X, we can say nothing
4771 at all about the high-order bits. */
2f93eea8
PB
4772 return 1;
4773#else
4774 /* Likewise on machines that do, if the mode of the object is smaller
4775 than a word and loads of that size don't sign extend, we can say
4776 nothing about the high order bits. */
5511bc5a 4777 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
2f93eea8
PB
4778#ifdef LOAD_EXTEND_OP
4779 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4780#endif
4781 )
4782 return 1;
4783#endif
4784 }
4785
4786 switch (code)
4787 {
4788 case REG:
4789
4790#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4791 /* If pointers extend signed and this is a pointer in Pmode, say that
4792 all the bits above ptr_mode are known to be sign bit copies. */
5932a4d4 4793 /* As we do not know which address space the pointer is referring to,
d4ebfa65
BE
4794 we can do this only if the target does not support different pointer
4795 or address modes depending on the address space. */
4796 if (target_default_pointer_address_modes_p ()
4797 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4798 && mode == Pmode && REG_POINTER (x))
5511bc5a 4799 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
2f93eea8
PB
4800#endif
4801
4802 {
4803 unsigned int copies_for_hook = 1, copies = 1;
55d796da 4804 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
2f93eea8
PB
4805 known_mode, known_ret,
4806 &copies_for_hook);
4807
55d796da
KG
4808 if (new_rtx)
4809 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
2f93eea8
PB
4810 known_mode, known_ret);
4811
4812 if (copies > 1 || copies_for_hook > 1)
4813 return MAX (copies, copies_for_hook);
4814
4815 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4816 }
4817 break;
4818
4819 case MEM:
4820#ifdef LOAD_EXTEND_OP
4821 /* Some RISC machines sign-extend all loads of smaller than a word. */
4822 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4823 return MAX (1, ((int) bitwidth
5511bc5a 4824 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
2f93eea8
PB
4825#endif
4826 break;
4827
4828 case CONST_INT:
4829 /* If the constant is negative, take its 1's complement and remask.
4830 Then see how many zero bits we have. */
c04fc4f0 4831 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
2f93eea8 4832 if (bitwidth <= HOST_BITS_PER_WIDE_INT
c04fc4f0 4833 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
2f93eea8
PB
4834 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4835
4836 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4837
4838 case SUBREG:
4839 /* If this is a SUBREG for a promoted object that is sign-extended
4840 and we are looking at it in a wider mode, we know that at least the
4841 high-order bits are known to be sign bit copies. */
4842
362d42dc 4843 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
2f93eea8
PB
4844 {
4845 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4846 known_x, known_mode, known_ret);
4847 return MAX ((int) bitwidth
5511bc5a 4848 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
2f93eea8
PB
4849 num0);
4850 }
4851
4852 /* For a smaller object, just ignore the high bits. */
5511bc5a 4853 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
2f93eea8
PB
4854 {
4855 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4856 known_x, known_mode, known_ret);
4857 return MAX (1, (num0
5511bc5a 4858 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
2f93eea8
PB
4859 - bitwidth)));
4860 }
4861
4862#ifdef WORD_REGISTER_OPERATIONS
4863#ifdef LOAD_EXTEND_OP
4864 /* For paradoxical SUBREGs on machines where all register operations
4865 affect the entire register, just look inside. Note that we are
4866 passing MODE to the recursive call, so the number of sign bit copies
4867 will remain relative to that mode, not the inner mode. */
4868
4869 /* This works only if loads sign extend. Otherwise, if we get a
4870 reload for the inner part, it may be loaded from the stack, and
4871 then we lose all sign bit copies that existed before the store
4872 to the stack. */
4873
6a4bdc79 4874 if (paradoxical_subreg_p (x)
2f93eea8 4875 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3c0cb5de 4876 && MEM_P (SUBREG_REG (x)))
2f93eea8
PB
4877 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4878 known_x, known_mode, known_ret);
4879#endif
4880#endif
4881 break;
4882
4883 case SIGN_EXTRACT:
481683e1 4884 if (CONST_INT_P (XEXP (x, 1)))
2f93eea8
PB
4885 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4886 break;
4887
4888 case SIGN_EXTEND:
5511bc5a 4889 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
2f93eea8
PB
4890 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4891 known_x, known_mode, known_ret));
4892
4893 case TRUNCATE:
4894 /* For a smaller object, just ignore the high bits. */
4895 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4896 known_x, known_mode, known_ret);
5511bc5a 4897 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
2f93eea8
PB
4898 - bitwidth)));
4899
4900 case NOT:
4901 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4902 known_x, known_mode, known_ret);
4903
4904 case ROTATE: case ROTATERT:
4905 /* If we are rotating left by a number of bits less than the number
4906 of sign bit copies, we can just subtract that amount from the
4907 number. */
481683e1 4908 if (CONST_INT_P (XEXP (x, 1))
2f93eea8
PB
4909 && INTVAL (XEXP (x, 1)) >= 0
4910 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4911 {
4912 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4913 known_x, known_mode, known_ret);
4914 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4915 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4916 }
4917 break;
4918
4919 case NEG:
4920 /* In general, this subtracts one sign bit copy. But if the value
4921 is known to be positive, the number of sign bit copies is the
4922 same as that of the input. Finally, if the input has just one bit
4923 that might be nonzero, all the bits are copies of the sign bit. */
4924 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4925 known_x, known_mode, known_ret);
4926 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4927 return num0 > 1 ? num0 - 1 : 1;
4928
4929 nonzero = nonzero_bits (XEXP (x, 0), mode);
4930 if (nonzero == 1)
4931 return bitwidth;
4932
4933 if (num0 > 1
c04fc4f0 4934 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
2f93eea8
PB
4935 num0--;
4936
4937 return num0;
4938
4939 case IOR: case AND: case XOR:
4940 case SMIN: case SMAX: case UMIN: case UMAX:
4941 /* Logical operations will preserve the number of sign-bit copies.
4942 MIN and MAX operations always return one of the operands. */
4943 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4944 known_x, known_mode, known_ret);
4945 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4946 known_x, known_mode, known_ret);
22761ec3
AN
4947
4948 /* If num1 is clearing some of the top bits then regardless of
4949 the other term, we are guaranteed to have at least that many
4950 high-order zero bits. */
4951 if (code == AND
4952 && num1 > 1
4953 && bitwidth <= HOST_BITS_PER_WIDE_INT
481683e1 4954 && CONST_INT_P (XEXP (x, 1))
c04fc4f0
EB
4955 && (UINTVAL (XEXP (x, 1))
4956 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
22761ec3
AN
4957 return num1;
4958
4959 /* Similarly for IOR when setting high-order bits. */
4960 if (code == IOR
4961 && num1 > 1
4962 && bitwidth <= HOST_BITS_PER_WIDE_INT
481683e1 4963 && CONST_INT_P (XEXP (x, 1))
c04fc4f0
EB
4964 && (UINTVAL (XEXP (x, 1))
4965 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
22761ec3
AN
4966 return num1;
4967
2f93eea8
PB
4968 return MIN (num0, num1);
4969
4970 case PLUS: case MINUS:
4971 /* For addition and subtraction, we can have a 1-bit carry. However,
4972 if we are subtracting 1 from a positive number, there will not
4973 be such a carry. Furthermore, if the positive number is known to
4974 be 0 or 1, we know the result is either -1 or 0. */
4975
4976 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4977 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4978 {
4979 nonzero = nonzero_bits (XEXP (x, 0), mode);
c04fc4f0 4980 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
2f93eea8
PB
4981 return (nonzero == 1 || nonzero == 0 ? bitwidth
4982 : bitwidth - floor_log2 (nonzero) - 1);
4983 }
4984
4985 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4986 known_x, known_mode, known_ret);
4987 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4988 known_x, known_mode, known_ret);
4989 result = MAX (1, MIN (num0, num1) - 1);
4990
2f93eea8
PB
4991 return result;
4992
4993 case MULT:
4994 /* The number of bits of the product is the sum of the number of
4995 bits of both terms. However, unless one of the terms is known
4996 to be positive, we must allow for an additional bit since negating
4997 a negative number can remove one sign bit copy. */
4998
4999 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5000 known_x, known_mode, known_ret);
5001 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5002 known_x, known_mode, known_ret);
5003
5004 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5005 if (result > 0
5006 && (bitwidth > HOST_BITS_PER_WIDE_INT
5007 || (((nonzero_bits (XEXP (x, 0), mode)
c04fc4f0 5008 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
2f93eea8 5009 && ((nonzero_bits (XEXP (x, 1), mode)
c04fc4f0
EB
5010 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
5011 != 0))))
2f93eea8
PB
5012 result--;
5013
5014 return MAX (1, result);
5015
5016 case UDIV:
5017 /* The result must be <= the first operand. If the first operand
5018 has the high bit set, we know nothing about the number of sign
5019 bit copies. */
5020 if (bitwidth > HOST_BITS_PER_WIDE_INT)
5021 return 1;
5022 else if ((nonzero_bits (XEXP (x, 0), mode)
c04fc4f0 5023 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
2f93eea8
PB
5024 return 1;
5025 else
5026 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
5027 known_x, known_mode, known_ret);
5028
5029 case UMOD:
24d179b4
JJ
5030 /* The result must be <= the second operand. If the second operand
5031 has (or just might have) the high bit set, we know nothing about
5032 the number of sign bit copies. */
5033 if (bitwidth > HOST_BITS_PER_WIDE_INT)
5034 return 1;
5035 else if ((nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 5036 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
24d179b4
JJ
5037 return 1;
5038 else
5039 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
2f93eea8
PB
5040 known_x, known_mode, known_ret);
5041
5042 case DIV:
5043 /* Similar to unsigned division, except that we have to worry about
5044 the case where the divisor is negative, in which case we have
5045 to add 1. */
5046 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5047 known_x, known_mode, known_ret);
5048 if (result > 1
5049 && (bitwidth > HOST_BITS_PER_WIDE_INT
5050 || (nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 5051 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
2f93eea8
PB
5052 result--;
5053
5054 return result;
5055
5056 case MOD:
5057 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5058 known_x, known_mode, known_ret);
5059 if (result > 1
5060 && (bitwidth > HOST_BITS_PER_WIDE_INT
5061 || (nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 5062 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
2f93eea8
PB
5063 result--;
5064
5065 return result;
5066
5067 case ASHIFTRT:
5068 /* Shifts by a constant add to the number of bits equal to the
5069 sign bit. */
5070 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5071 known_x, known_mode, known_ret);
481683e1 5072 if (CONST_INT_P (XEXP (x, 1))
39b2ac74 5073 && INTVAL (XEXP (x, 1)) > 0
5511bc5a 5074 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
5075 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
5076
5077 return num0;
5078
5079 case ASHIFT:
5080 /* Left shifts destroy copies. */
481683e1 5081 if (!CONST_INT_P (XEXP (x, 1))
2f93eea8 5082 || INTVAL (XEXP (x, 1)) < 0
39b2ac74 5083 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
5511bc5a 5084 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
5085 return 1;
5086
5087 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
5088 known_x, known_mode, known_ret);
5089 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5090
5091 case IF_THEN_ELSE:
5092 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
5093 known_x, known_mode, known_ret);
5094 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
5095 known_x, known_mode, known_ret);
5096 return MIN (num0, num1);
5097
5098 case EQ: case NE: case GE: case GT: case LE: case LT:
5099 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
5100 case GEU: case GTU: case LEU: case LTU:
5101 case UNORDERED: case ORDERED:
5102 /* The result is STORE_FLAG_VALUE; if that is negative, take its 1's
5103 complement and remask. Then see how many zero bits we have. */
5104 nonzero = STORE_FLAG_VALUE;
5105 if (bitwidth <= HOST_BITS_PER_WIDE_INT
c04fc4f0 5106 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
2f93eea8
PB
5107 nonzero = (~nonzero) & GET_MODE_MASK (mode);
5108
5109 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5110
5111 default:
5112 break;
5113 }
5114
5115 /* If we haven't been able to figure it out by one of the above rules,
5116 see if some of the high-order bits are known to be zero. If so,
5117 count those bits and return one less than that amount. If we can't
5118 safely compute the mask for this mode, always return BITWIDTH. */
5119
5511bc5a 5120 bitwidth = GET_MODE_PRECISION (mode);
2f93eea8
PB
5121 if (bitwidth > HOST_BITS_PER_WIDE_INT)
5122 return 1;
5123
5124 nonzero = nonzero_bits (x, mode);
c04fc4f0 5125 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
2f93eea8
PB
5126 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
5127}
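
/* A concrete instance of the CONST_INT rule above: for (const_int -4)
   looked at in SImode, the masked value is 0xfffffffc; it is negative,
   so we complement to get 3, and floor_log2 (3) == 1 gives
   32 - 1 - 1 == 30 sign bit copies -- bits 31 down to 2 all equal the
   sign bit.  */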
6fd21094
RS
5128
5129/* Calculate the rtx_cost of a single instruction. A return value of
5130 zero indicates an instruction pattern without a known cost. */
5131
5132int
f40751dd 5133insn_rtx_cost (rtx pat, bool speed)
6fd21094
RS
5134{
5135 int i, cost;
5136 rtx set;
5137
5138 /* Extract the single set rtx from the instruction pattern.
5139 We can't use single_set since we only have the pattern. */
5140 if (GET_CODE (pat) == SET)
5141 set = pat;
5142 else if (GET_CODE (pat) == PARALLEL)
5143 {
5144 set = NULL_RTX;
5145 for (i = 0; i < XVECLEN (pat, 0); i++)
5146 {
5147 rtx x = XVECEXP (pat, 0, i);
5148 if (GET_CODE (x) == SET)
5149 {
5150 if (set)
5151 return 0;
5152 set = x;
5153 }
5154 }
5155 if (!set)
5156 return 0;
5157 }
5158 else
5159 return 0;
5160
e548c9df 5161 cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
6fd21094
RS
5162 return cost > 0 ? cost : COSTS_N_INSNS (1);
5163}
75473b02 5164
11204b2d
ZC
5165/* Returns estimate on cost of computing SEQ. */
5166
5167unsigned
5168seq_cost (const rtx_insn *seq, bool speed)
5169{
5170 unsigned cost = 0;
5171 rtx set;
5172
5173 for (; seq; seq = NEXT_INSN (seq))
5174 {
5175 set = single_set (seq);
5176 if (set)
5177 cost += set_rtx_cost (set, speed);
5178 else
5179 cost++;
5180 }
5181
5182 return cost;
5183}
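
/* Usage sketch (illustrative): costing a candidate expansion built in
   a temporary sequence before committing to it.

     start_sequence ();
     ... emit the candidate insns ...
     rtx_insn *seq = get_insns ();
     end_sequence ();
     if (seq_cost (seq, optimize_insn_for_speed_p ()) <= budget)
       emit_insn (seq);

   BUDGET stands for a caller-chosen cost threshold.  */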
5184
75473b02
SB
5185/* Given an insn INSN and condition COND, return the condition in a
5186 canonical form to simplify testing by callers. Specifically:
5187
5188 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
5189 (2) Both operands will be machine operands; (cc0) will have been replaced.
5190 (3) If an operand is a constant, it will be the second operand.
5191 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
5192 for GE, GEU, and LEU.
5193
5194 If the condition cannot be understood, or is an inequality floating-point
5195 comparison which needs to be reversed, 0 will be returned.
5196
5197 If REVERSE is nonzero, then reverse the condition prior to canonizing it.
5198
5199 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5200 insn used in locating the condition was found. If a replacement test
5201 of the condition is desired, it should be placed in front of that
5202 insn and we will be sure that the inputs are still valid.
5203
5204 If WANT_REG is nonzero, we wish the condition to be relative to that
5205 register, if possible. Therefore, do not canonicalize the condition
b8698a0f 5206 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
75473b02
SB
5207 to be a compare to a CC mode register.
5208
5209 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
5210 and at INSN. */
5211
5212rtx
61aa0978
DM
5213canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
5214 rtx_insn **earliest,
75473b02
SB
5215 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
5216{
  enum rtx_code code;
  rtx_insn *prev = insn;
  const_rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  machine_mode mode;
  basic_block bb = BLOCK_FOR_INSN (insn);

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
	 && op1 == CONST0_RTX (GET_MODE (op0))
	 && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

      /* If comparison with cc0, import actual comparison from compare
	 insn.  */
      if (op0 == cc0_rtx)
	{
	  if ((prev = prev_nonnote_insn (prev)) == 0
	      || !NONJUMP_INSN_P (prev)
	      || (set = single_set (prev)) == 0
	      || SET_DEST (set) != cc0_rtx)
	    return 0;

	  op0 = SET_SRC (set);
	  op1 = CONST0_RTX (GET_MODE (op0));
	  if (earliest)
	    *earliest = prev;
	}

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
	{
	  op1 = XEXP (op0, 1);
	  op0 = XEXP (op0, 0);
	  continue;
	}
      else if (!REG_P (op0))
	break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
	 stop if it isn't a single set or if it has a REG_INC note because
	 we don't want to bother dealing with it.  */

      prev = prev_nonnote_nondebug_insn (prev);

      if (prev == 0
	  || !NONJUMP_INSN_P (prev)
	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
	  /* In cfglayout mode, there do not have to be labels at the
	     beginning of a block, or jumps at the end, so the previous
	     conditions would not stop us when we reach bb boundary.  */
	  || BLOCK_FOR_INSN (prev) != bb)
	break;

      set = set_of (op0, prev);

      if (set
	  && (GET_CODE (set) != SET
	      || !rtx_equal_p (SET_DEST (set), op0)))
	break;

      /* If this is setting OP0, get what it sets it to if it looks
	 relevant.  */
      if (set)
	{
	  machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* ??? We may not combine comparisons done in a CCmode with
	     comparisons not done in a CCmode.  This is to aid targets
	     like Alpha that have an IEEE compliant EQ instruction, and
	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
	     actually artificial, simply to prevent the combination, but
	     should not affect other platforms.

	     However, we must allow VOIDmode comparisons to match either
	     CCmode or non-CCmode comparison, because some ports have
	     modeless comparisons inside branch patterns.

	     ??? This mode check should perhaps look more like the mode check
	     in simplify_comparison in combine.  */
	  if (((GET_MODE_CLASS (mode) == MODE_CC)
	       != (GET_MODE_CLASS (inner_mode) == MODE_CC))
	      && mode != VOIDmode
	      && inner_mode != VOIDmode)
	    break;
	  if (GET_CODE (SET_SRC (set)) == COMPARE
	      || (((code == NE
		    || (code == LT
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == LT
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    ))
		  && COMPARISON_P (SET_SRC (set))))
	    x = SET_SRC (set);
	  else if (((code == EQ
		     || (code == GE
			 && val_signbit_known_set_p (inner_mode,
						     STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		     || (code == GE
			 && SCALAR_FLOAT_MODE_P (inner_mode)
			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			     REAL_VALUE_NEGATIVE (fsfv)))
#endif
		     ))
		   && COMPARISON_P (SET_SRC (set)))
	    {
	      reverse_code = 1;
	      x = SET_SRC (set);
	    }
	  else if ((code == EQ || code == NE)
		   && GET_CODE (SET_SRC (set)) == XOR)
	    /* Handle sequences like:

	       (set op0 (xor X Y))
	       ...(eq|ne op0 (const_int 0))...

	       in which case:

	       (eq op0 (const_int 0)) reduces to (eq X Y)
	       (ne op0 (const_int 0)) reduces to (ne X Y)

	       This is the form used by MIPS16, for example.  */
	    x = SET_SRC (set);
	  else
	    break;
	}

      else if (reg_set_p (op0, prev))
	/* If this sets OP0, but not directly, we have to give up.  */
	break;

      if (x)
	{
	  /* If the caller is expecting the condition to be valid at INSN,
	     make sure X doesn't change before INSN.  */
	  if (valid_at_insn_p)
	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
	      break;
	  if (COMPARISON_P (x))
	    code = GET_CODE (x);
	  if (reverse_code)
	    {
	      code = reversed_comparison_code (x, prev);
	      if (code == UNKNOWN)
		return 0;
	      reverse_code = 0;
	    }

	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	  if (earliest)
	    *earliest = prev;
	}
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && CONST_INT_P (op1)
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
	{
	case LE:
	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
	  break;

	/* When cross-compiling, const_val might be sign-extended from
	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
	case GE:
	  if ((const_val & max_val)
	      != ((unsigned HOST_WIDE_INT) 1
		  << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
	  break;

	case LEU:
	  if (uconst_val < max_val)
	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
	  break;

	case GEU:
	  if (uconst_val != 0)
	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
	  break;

	default:
	  break;
	}
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
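
/* Worked example (illustrative, not part of the original source):
   given a condition (le:SI (reg:SI 100) (const_int 4)), rule (4)
   above rewrites it as (lt:SI (reg:SI 100) (const_int 5)), since
   x <= 4 and x < 5 are equivalent and LT is the canonical form.
   Likewise (geu x (const_int 1)) becomes (gtu x (const_int 0)).  */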

/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
	       int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
				 allow_cc_mode, valid_at_insn_p);
}
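
/* Usage sketch (illustrative, not part of the original source): a
   pass inspecting a loop's exit test might do

     rtx_insn *earliest;
     rtx cond = get_condition (exit_jump, &earliest, 0, 1);

   where EXIT_JUMP is assumed to be the loop's conditional jump.  On
   success COND is a canonical comparison such as (lt (reg) (reg)),
   valid at both EARLIEST and EXIT_JUMP because VALID_AT_INSN_P was
   passed as nonzero.  */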

/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */

static void
init_num_sign_bit_copies_in_rep (void)
{
  machine_mode mode, in_mode;

  for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
       in_mode = GET_MODE_WIDER_MODE (mode))
    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
	 mode = GET_MODE_WIDER_MODE (mode))
      {
	machine_mode i;

	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
	   extends to the next widest mode.  */
	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
		    || GET_MODE_WIDER_MODE (mode) == in_mode);

	/* We are in in_mode.  Count how many bits outside of mode
	   have to be copies of the sign-bit.  */
	for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
	  {
	    machine_mode wider = GET_MODE_WIDER_MODE (i);

	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
		/* We can only check sign-bit copies starting from the
		   top-bit.  In order to be able to check the bits we
		   have already seen we pretend that subsequent bits
		   have to be sign-bit copies too.  */
		|| num_sign_bit_copies_in_rep [in_mode][mode])
	      num_sign_bit_copies_in_rep [in_mode][mode]
		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
	  }
      }
}

/* Suppose that truncation from the machine mode of X to MODE is not a
   no-op.  See if there is anything special about X so that we can
   assume it already contains a truncated value of MODE.  */

bool
truncated_to_mode (machine_mode mode, const_rtx x)
{
  /* This register has already been used in MODE without explicit
     truncation.  */
  if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    return true;

  /* See if we already satisfy the requirements of MODE.  If yes we
     can just switch to MODE.  */
  if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
      && (num_sign_bit_copies (x, GET_MODE (x))
	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    return true;

  return false;
}
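
/* Worked example (illustrative, not part of the original source): on
   a target whose TARGET_MODE_REP_EXTENDED (SImode, DImode) is
   SIGN_EXTEND (MIPS64, for instance), the table built above records
   num_sign_bit_copies_in_rep[DImode][SImode] == 32.  A DImode value X
   is then known to be truncated to SImode already whenever
   num_sign_bit_copies (X, DImode) >= 33, i.e. when its upper 32 bits
   plus the SImode sign bit are all copies of the sign.  */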

/* Return true if RTX code CODE has a single sequence of zero or more
   "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
   entry in that case.  */

static bool
setup_reg_subrtx_bounds (unsigned int code)
{
  const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
  unsigned int i = 0;
  for (; format[i] != 'e'; ++i)
    {
      if (!format[i])
	/* No subrtxes.  Leave start and count as 0.  */
	return true;
      if (format[i] == 'E' || format[i] == 'V')
	return false;
    }

  /* Record the sequence of 'e's.  */
  rtx_all_subrtx_bounds[code].start = i;
  do
    ++i;
  while (format[i] == 'e');
  rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
  /* rtl-iter.h relies on this.  */
  gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);

  for (; format[i]; ++i)
    if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
      return false;

  return true;
}
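
/* Worked examples (illustrative, not part of the original source):
   PLUS has format "ee", so its bounds become start 0, count 2;
   ZERO_EXTRACT has format "eee", giving start 0, count 3; and
   SIGN_EXTEND ("e") gives start 0, count 1.  PARALLEL has format "E"
   (an rtvec), so this function returns false and init_rtlanal below
   records UCHAR_MAX instead, telling the iterators to fall back to
   the generic walk.  */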

/* Initialize rtx_all_subrtx_bounds.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      if (!setup_reg_subrtx_bounds (i))
	rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
      if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
	rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
    }

  init_num_sign_bit_copies_in_rep ();
}
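
/* Once the bounds are initialized, the rtl-iter.h walkers can visit
   subrtxes cheaply.  A sketch (illustrative, not part of the original
   source) in the style of tls_referenced_p at the end of this file:

     subrtx_iterator::array_type array;
     FOR_EACH_SUBRTX (iter, array, x, ALL)
       if (MEM_P (*iter))
	 return true;  */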

/* Check whether this is a constant pool constant.  */
bool
constant_pool_constant_p (rtx x)
{
  x = avoid_constant_pool_reference (x);
  return CONST_DOUBLE_P (x);
}

/* If M is a bitmask that selects a field of low-order bits within an item but
   not the entire word, return the length of the field.  Return -1 otherwise.
   M is used in machine mode MODE.  */

int
low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
{
  if (mode != VOIDmode)
    {
      if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
	return -1;
      m &= GET_MODE_MASK (mode);
    }

  return exact_log2 (m + 1);
}
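
/* Worked example (illustrative, not part of the original source):
   low_bitmask_len (SImode, 0x3f) returns 6, because 0x3f + 1 == 0x40
   is an exact power of two, so the mask selects the low 6 bits;
   low_bitmask_len (SImode, 0x35) returns -1, because 0x36 is not a
   power of two and the mask is therefore not a contiguous low-order
   field.  */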

/* Return the mode of MEM's address.  */

machine_mode
get_address_mode (rtx mem)
{
  machine_mode mode;

  gcc_assert (MEM_P (mem));
  mode = GET_MODE (XEXP (mem, 0));
  if (mode != VOIDmode)
    return mode;
  return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
}

/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.

   TODO: This function needs to be rewritten to work on any size
   integer.  */

void
split_double (rtx value, rtx *first, rtx *second)
{
  if (CONST_INT_P (value))
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
	{
	  /* In this case the CONST_INT holds both target words.
	     Extract the bits from it into two word-sized pieces.
	     Sign extend each half to HOST_WIDE_INT.  */
	  unsigned HOST_WIDE_INT low, high;
	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
	  unsigned bits_per_word = BITS_PER_WORD;

	  /* Set sign_bit to the most significant bit of a word.  */
	  sign_bit = 1;
	  sign_bit <<= bits_per_word - 1;

	  /* Set mask so that all bits of the word are set.  We could
	     have used 1 << BITS_PER_WORD instead of basing the
	     calculation on sign_bit.  However, on machines where
	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
	     compiler warning, even though the code would never be
	     executed.  */
	  mask = sign_bit << 1;
	  mask--;

	  /* Set sign_extend as any remaining bits.  */
	  sign_extend = ~mask;

	  /* Pick the lower word and sign-extend it.  */
	  low = INTVAL (value);
	  low &= mask;
	  if (low & sign_bit)
	    low |= sign_extend;

	  /* Pick the higher word, shifted to the least significant
	     bits, and sign-extend it.  */
	  high = INTVAL (value);
	  high >>= bits_per_word - 1;
	  high >>= 1;
	  high &= mask;
	  if (high & sign_bit)
	    high |= sign_extend;

	  /* Store the words in the target machine order.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = GEN_INT (high);
	      *second = GEN_INT (low);
	    }
	  else
	    {
	      *first = GEN_INT (low);
	      *second = GEN_INT (high);
	    }
	}
      else
	{
	  /* The rule for using CONST_INT for a wider mode
	     is that we regard the value as signed.
	     So sign-extend it.  */
	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = high;
	      *second = value;
	    }
	  else
	    {
	      *first = value;
	      *second = high;
	    }
	}
    }
  else if (GET_CODE (value) == CONST_WIDE_INT)
    {
      /* All of this is scary code and needs to be converted to
	 properly work with any size integer.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	}
      else
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	}
    }
  else if (!CONST_DOUBLE_P (value))
    {
      if (WORDS_BIG_ENDIAN)
	{
	  *first = const0_rtx;
	  *second = value;
	}
      else
	{
	  *first = value;
	  *second = const0_rtx;
	}
    }
  else if (GET_MODE (value) == VOIDmode
	   /* This is the old way we did CONST_DOUBLE integers.  */
	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    {
      /* In an integer, the words are defined as most and least significant.
	 So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
	}
      else
	{
	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
	}
    }
  else
    {
      REAL_VALUE_TYPE r;
      long l[2];
      REAL_VALUE_FROM_CONST_DOUBLE (r, value);

      /* Note, this converts the REAL_VALUE_TYPE to the target's
	 format, splits up the floating point double and outputs
	 exactly 32 bits of it into each of l[0] and l[1] --
	 not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

      /* If 32 bits is an entire word for the target, but not for the host,
	 then sign-extend on the host so that the number will look the same
	 way on the host that it would on the target.  See for instance
	 simplify_unary_operation.  The #if is needed to avoid compiler
	 warnings.  */

#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
	{
	  if (l[0] & ((long) 1 << 31))
	    l[0] |= ((long) (-1) << 32);
	  if (l[1] & ((long) 1 << 31))
	    l[1] |= ((long) (-1) << 32);
	}
#endif

      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
    }
}
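
/* Worked example (illustrative, not part of the original source): on
   a 32-bit little-endian target (BITS_PER_WORD == 32,
   !WORDS_BIG_ENDIAN) with a 64-bit HOST_WIDE_INT, splitting the
   CONST_INT 0x123456789abcdef0 stores the low word 0x9abcdef0 in
   *FIRST and the high word 0x12345678 in *SECOND; on a big-endian
   target the two are swapped.  Each half is sign-extended, so the
   low word, whose bit 31 is set, is actually represented as the
   negative HOST_WIDE_INT -0x65432110.  */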

/* Return true if X is a sign_extract or zero_extract from the least
   significant bit.  */

static bool
lsb_bitfield_op_p (rtx x)
{
  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
    {
      machine_mode mode = GET_MODE (XEXP (x, 0));
      HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

      return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
    }
  return false;
}

/* Strip outer address "mutations" from LOC and return a pointer to the
   inner value.  If OUTER_CODE is nonnull, store the code of the innermost
   stripped expression there.

   "Mutations" either convert between modes or apply some kind of
   extension, truncation or alignment.  */

rtx *
strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
{
  for (;;)
    {
      enum rtx_code code = GET_CODE (*loc);
      if (GET_RTX_CLASS (code) == RTX_UNARY)
	/* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
	   used to convert between pointer sizes.  */
	loc = &XEXP (*loc, 0);
      else if (lsb_bitfield_op_p (*loc))
	/* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
	   acts as a combined truncation and extension.  */
	loc = &XEXP (*loc, 0);
      else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
	/* (and ... (const_int -X)) is used to align to X bytes.  */
	loc = &XEXP (*loc, 0);
      else if (code == SUBREG
	       && !OBJECT_P (SUBREG_REG (*loc))
	       && subreg_lowpart_p (*loc))
	/* (subreg (operator ...) ...) inside an AND is used for mode
	   conversion too.  */
	loc = &SUBREG_REG (*loc);
      else
	return loc;
      if (outer_code)
	*outer_code = code;
    }
}
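
/* Worked example (illustrative, not part of the original source):
   given an address such as

     (and:SI (plus:SI (reg:SI 100) (reg:SI 101)) (const_int -4))

   the AND with -4 is recognized as a 4-byte alignment mutation, so
   this returns a pointer to the inner (plus ...) and, if requested,
   stores AND in *OUTER_CODE.  Similarly (zero_extend:DI (reg:SI 100))
   strips to the SImode register.  */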

/* Return true if CODE applies some kind of scale.  The scaled value
   is the first operand and the scale is the second.  */

static bool
binary_scale_code_p (enum rtx_code code)
{
  return (code == MULT
	  || code == ASHIFT
	  /* Needed by ARM targets.  */
	  || code == ASHIFTRT
	  || code == LSHIFTRT
	  || code == ROTATE
	  || code == ROTATERT);
}

/* If *INNER can be interpreted as a base, return a pointer to the inner term
   (see address_info).  Return null otherwise.  */

static rtx *
get_base_term (rtx *inner)
{
  if (GET_CODE (*inner) == LO_SUM)
    inner = strip_address_mutations (&XEXP (*inner, 0));
  if (REG_P (*inner)
      || MEM_P (*inner)
      || GET_CODE (*inner) == SUBREG
      || GET_CODE (*inner) == SCRATCH)
    return inner;
  return 0;
}

/* If *INNER can be interpreted as an index, return a pointer to the inner term
   (see address_info).  Return null otherwise.  */

static rtx *
get_index_term (rtx *inner)
{
  /* At present, only constant scales are allowed.  */
  if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
    inner = strip_address_mutations (&XEXP (*inner, 0));
  if (REG_P (*inner)
      || MEM_P (*inner)
      || GET_CODE (*inner) == SUBREG
      || GET_CODE (*inner) == SCRATCH)
    return inner;
  return 0;
}

/* Set the segment part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->segment);
  info->segment = loc;
  info->segment_term = inner;
}

/* Set the base part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_base (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->base);
  info->base = loc;
  info->base_term = inner;
}

/* Set the index part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_index (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->index);
  info->index = loc;
  info->index_term = inner;
}

/* Set the displacement part of address INFO to LOC, given that INNER
   is the constant term.  */

static void
set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->disp);
  info->disp = loc;
  info->disp_term = inner;
}

/* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
   rest of INFO accordingly.  */

static void
decompose_incdec_address (struct address_info *info)
{
  info->autoinc_p = true;

  rtx *base = &XEXP (*info->inner, 0);
  set_address_base (info, base, base);
  gcc_checking_assert (info->base == info->base_term);

  /* These addresses are only valid when the size of the addressed
     value is known.  */
  gcc_checking_assert (info->mode != VOIDmode);
}

/* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
   of INFO accordingly.  */

static void
decompose_automod_address (struct address_info *info)
{
  info->autoinc_p = true;

  rtx *base = &XEXP (*info->inner, 0);
  set_address_base (info, base, base);
  gcc_checking_assert (info->base == info->base_term);

  rtx plus = XEXP (*info->inner, 1);
  gcc_assert (GET_CODE (plus) == PLUS);

  info->base_term2 = &XEXP (plus, 0);
  gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));

  rtx *step = &XEXP (plus, 1);
  rtx *inner_step = strip_address_mutations (step);
  if (CONSTANT_P (*inner_step))
    set_address_disp (info, step, inner_step);
  else
    set_address_index (info, step, inner_step);
}

/* Treat *LOC as a tree of PLUS operands and store pointers to the summed
   values in [PTR, END).  Return a pointer to the end of the used array.  */

static rtx **
extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
{
  rtx x = *loc;
  if (GET_CODE (x) == PLUS)
    {
      ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
      ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
    }
  else
    {
      gcc_assert (ptr != end);
      *ptr++ = loc;
    }
  return ptr;
}

/* Evaluate the likelihood of X being a base or index value, returning
   positive if it is likely to be a base, negative if it is likely to be
   an index, and 0 if we can't tell.  Make the magnitude of the return
   value reflect the amount of confidence we have in the answer.

   MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */

static int
baseness (rtx x, machine_mode mode, addr_space_t as,
	  enum rtx_code outer_code, enum rtx_code index_code)
{
  /* Believe *_POINTER unless the address shape requires otherwise.  */
  if (REG_P (x) && REG_POINTER (x))
    return 2;
  if (MEM_P (x) && MEM_POINTER (x))
    return 2;

  if (REG_P (x) && HARD_REGISTER_P (x))
    {
      /* X is a hard register.  If it only fits one of the base
	 or index classes, choose that interpretation.  */
      int regno = REGNO (x);
      bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
      bool index_p = REGNO_OK_FOR_INDEX_P (regno);
      if (base_p != index_p)
	return base_p ? 1 : -1;
    }
  return 0;
}

/* INFO->INNER describes a normal, non-automodified address.
   Fill in the rest of INFO accordingly.  */

static void
decompose_normal_address (struct address_info *info)
{
  /* Treat the address as the sum of up to four values.  */
  rtx *ops[4];
  size_t n_ops = extract_plus_operands (info->inner, ops,
					ops + ARRAY_SIZE (ops)) - ops;

  /* If there is more than one component, any base component is in a PLUS.  */
  if (n_ops > 1)
    info->base_outer_code = PLUS;

  /* Try to classify each sum operand now.  Leave those that could be
     either a base or an index in OPS.  */
  rtx *inner_ops[4];
  size_t out = 0;
  for (size_t in = 0; in < n_ops; ++in)
    {
      rtx *loc = ops[in];
      rtx *inner = strip_address_mutations (loc);
      if (CONSTANT_P (*inner))
	set_address_disp (info, loc, inner);
      else if (GET_CODE (*inner) == UNSPEC)
	set_address_segment (info, loc, inner);
      else
	{
	  /* The only other possibilities are a base or an index.  */
	  rtx *base_term = get_base_term (inner);
	  rtx *index_term = get_index_term (inner);
	  gcc_assert (base_term || index_term);
	  if (!base_term)
	    set_address_index (info, loc, index_term);
	  else if (!index_term)
	    set_address_base (info, loc, base_term);
	  else
	    {
	      gcc_assert (base_term == index_term);
	      ops[out] = loc;
	      inner_ops[out] = base_term;
	      ++out;
	    }
	}
    }

  /* Classify the remaining OPS members as bases and indexes.  */
  if (out == 1)
    {
      /* If we haven't seen a base or an index yet, assume that this is
	 the base.  If we were confident that another term was the base
	 or index, treat the remaining operand as the other kind.  */
      if (!info->base)
	set_address_base (info, ops[0], inner_ops[0]);
      else
	set_address_index (info, ops[0], inner_ops[0]);
    }
  else if (out == 2)
    {
      /* In the event of a tie, assume the base comes first.  */
      if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
		    GET_CODE (*ops[1]))
	  >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
		       GET_CODE (*ops[0])))
	{
	  set_address_base (info, ops[0], inner_ops[0]);
	  set_address_index (info, ops[1], inner_ops[1]);
	}
      else
	{
	  set_address_base (info, ops[1], inner_ops[1]);
	  set_address_index (info, ops[0], inner_ops[0]);
	}
    }
  else
    gcc_assert (out == 0);
}

/* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
   or VOIDmode if not known.  AS is the address space associated with LOC.
   OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */

void
decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
		   addr_space_t as, enum rtx_code outer_code)
{
  memset (info, 0, sizeof (*info));
  info->mode = mode;
  info->as = as;
  info->addr_outer_code = outer_code;
  info->outer = loc;
  info->inner = strip_address_mutations (loc, &outer_code);
  info->base_outer_code = outer_code;
  switch (GET_CODE (*info->inner))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      decompose_incdec_address (info);
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      decompose_automod_address (info);
      break;

    default:
      decompose_normal_address (info);
      break;
    }
}
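
/* Worked example (illustrative, not part of the original source): for
   an x86-style address

     (plus:SI (plus:SI (mult:SI (reg:SI 101) (const_int 4))
		       (reg:SI 100))
	      (const_int 8))

   decompose_normal_address classifies (const_int 8) as the
   displacement and (mult (reg 101) (const_int 4)) as the index (only
   an index can carry a scale); (reg 100), the remaining candidate,
   then becomes the base.  get_index_scale on the resulting info
   returns 4.  */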

/* Describe address operand LOC in INFO.  */

void
decompose_lea_address (struct address_info *info, rtx *loc)
{
  decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
}

/* Describe the address of MEM X in INFO.  */

void
decompose_mem_address (struct address_info *info, rtx x)
{
  gcc_assert (MEM_P (x));
  decompose_address (info, &XEXP (x, 0), GET_MODE (x),
		     MEM_ADDR_SPACE (x), MEM);
}

/* Update INFO after a change to the address it describes.  */

void
update_address (struct address_info *info)
{
  decompose_address (info, info->outer, info->mode, info->as,
		     info->addr_outer_code);
}

/* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
   more complicated than that.  */

HOST_WIDE_INT
get_index_scale (const struct address_info *info)
{
  rtx index = *info->index;
  if (GET_CODE (index) == MULT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return INTVAL (XEXP (index, 1));

  if (GET_CODE (index) == ASHIFT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));

  if (info->index == info->index_term)
    return 1;

  return 0;
}
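
/* Worked example (illustrative, not part of the original source): an
   index of (mult:SI (reg:SI 101) (const_int 4)) yields 4, as does
   (ashift:SI (reg:SI 101) (const_int 2)), since a left shift by 2 is
   a multiplication by 4; a bare (reg:SI 101) index yields 1.  */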

/* Return the "index code" of INFO, in the form required by
   ok_for_base_p_1.  */

enum rtx_code
get_index_code (const struct address_info *info)
{
  if (info->index)
    return GET_CODE (*info->index);

  if (info->disp)
    return GET_CODE (*info->disp);

  return SCRATCH;
}

/* Return true if X contains a thread-local symbol.  */

bool
tls_referenced_p (const_rtx x)
{
  if (!targetm.have_tls)
    return false;

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
      return true;
  return false;
}
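
/* Usage sketch (illustrative, not part of the original source): a
   pass that must keep TLS symbols out of a context where they are not
   legitimate can guard on this predicate, e.g.

     if (tls_referenced_p (x))
       x = force_reg (mode, x);

   assuming MODE and X are the mode and value being legitimized.  */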