2c88418c 1/* Analyze RTL for C-Compiler
af841dbd 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
9ce79a7a 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
2c88418c 4
1322177d 5This file is part of GCC.
2c88418c 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
2c88418c 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
2c88418c
RS
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
2c88418c
RS
21
22
23#include "config.h"
670ee920 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
e35b9579 27#include "toplev.h"
2c88418c 28#include "rtl.h"
3335f1d9 29#include "hard-reg-set.h"
bc204393
RH
30#include "insn-config.h"
31#include "recog.h"
f894b69b
PB
32#include "target.h"
33#include "output.h"
91ea4f8d 34#include "tm_p.h"
f5eb5fd0 35#include "flags.h"
71d2c5bd 36#include "basic-block.h"
52bfebf0 37#include "real.h"
66fd46b6 38#include "regs.h"
2f93eea8 39#include "function.h"
2c88418c 40
e2373f95 41/* Forward declarations */
0c20a65f
AJ
42static int global_reg_mentioned_p_1 (rtx *, void *);
43static void set_of_1 (rtx, rtx, void *);
44static void insn_dependent_p_1 (rtx, rtx, void *);
45static int rtx_referenced_p_1 (rtx *, void *);
46static int computed_jump_p_1 (rtx);
47static void parms_set (rtx, rtx, void *);
48static bool hoist_test_store (rtx, rtx, regset);
49static void hoist_update_store (rtx, rtx *, rtx, rtx);
2a1777af 50
2f93eea8
PB
51static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
52 rtx, enum machine_mode,
53 unsigned HOST_WIDE_INT);
54static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
55 enum machine_mode,
56 unsigned HOST_WIDE_INT);
57static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
58 enum machine_mode,
59 unsigned int);
60static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
61 enum machine_mode, unsigned int);
62
2c88418c
RS
63/* Bit flags that specify the machine subtype we are compiling for.
64 Bits are tested using macros TARGET_... defined in the tm.h file
65 and set by `-m...' switches. Must be defined in rtlanal.c. */
66
67int target_flags;
68\f
69/* Return 1 if the value of X is unstable
70 (would be different at a different point in the program).
71 The frame pointer, arg pointer, etc. are considered stable
72 (within one function) and so is anything marked `unchanging'. */
73
74int
0c20a65f 75rtx_unstable_p (rtx x)
2c88418c 76{
b3694847
SS
77 RTX_CODE code = GET_CODE (x);
78 int i;
79 const char *fmt;
2c88418c 80
ae0fb1b9
JW
81 switch (code)
82 {
83 case MEM:
84 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
2c88418c 85
ae0fb1b9
JW
86 case QUEUED:
87 return 1;
2c88418c 88
ae0fb1b9
JW
89 case CONST:
90 case CONST_INT:
91 case CONST_DOUBLE:
69ef87e2 92 case CONST_VECTOR:
ae0fb1b9
JW
93 case SYMBOL_REF:
94 case LABEL_REF:
95 return 0;
2c88418c 96
ae0fb1b9
JW
97 case REG:
98 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
c0fc376b 99 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
3335f1d9
JL
100 /* The arg pointer varies if it is not a fixed register. */
101 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
102 || RTX_UNCHANGING_P (x))
c0fc376b
RH
103 return 0;
104#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
105 /* ??? When call-clobbered, the value is stable modulo the restore
106 that must happen after a call. This currently screws up local-alloc
107 into believing that the restore is not needed. */
108 if (x == pic_offset_table_rtx)
109 return 0;
110#endif
111 return 1;
ae0fb1b9
JW
112
113 case ASM_OPERANDS:
114 if (MEM_VOLATILE_P (x))
115 return 1;
116
5d3cc252 117 /* Fall through. */
ae0fb1b9
JW
118
119 default:
120 break;
121 }
2c88418c
RS
122
123 fmt = GET_RTX_FORMAT (code);
124 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
125 if (fmt[i] == 'e')
9c82ac6b
JW
126 {
127 if (rtx_unstable_p (XEXP (x, i)))
128 return 1;
129 }
130 else if (fmt[i] == 'E')
131 {
132 int j;
133 for (j = 0; j < XVECLEN (x, i); j++)
134 if (rtx_unstable_p (XVECEXP (x, i, j)))
135 return 1;
136 }
137
2c88418c
RS
138 return 0;
139}
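
/* Illustrative sketch (not part of the original file): the loop above is
   the standard rtx traversal idiom.  GET_RTX_FORMAT yields one character
   per operand -- 'e' for a sub-rtx, 'E' for a vector of sub-rtxes -- so a
   generic recursive walk looks like this hypothetical helper, which
   counts the REG leaves of an expression.  */

static int
count_reg_leaves (rtx x)
{
  const char *fmt;
  int i, j, count = 0;

  if (REG_P (x))
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      count += count_reg_leaves (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        count += count_reg_leaves (XVECEXP (x, i, j));

  return count;
}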
140
141/* Return 1 if X has a value that can vary even between two
142 executions of the program. 0 means X can be compared reliably
143 against certain constants or near-constants.
e38fe8e0
BS
144 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
145 zero, we are slightly more conservative.
2c88418c
RS
146 The frame pointer and the arg pointer are considered constant. */
147
148int
0c20a65f 149rtx_varies_p (rtx x, int for_alias)
2c88418c 150{
e978d62e 151 RTX_CODE code;
b3694847
SS
152 int i;
153 const char *fmt;
2c88418c 154
e978d62e
PB
155 if (!x)
156 return 0;
157
158 code = GET_CODE (x);
2c88418c
RS
159 switch (code)
160 {
161 case MEM:
e38fe8e0 162 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
55efb413 163
2c88418c
RS
164 case QUEUED:
165 return 1;
166
167 case CONST:
168 case CONST_INT:
169 case CONST_DOUBLE:
69ef87e2 170 case CONST_VECTOR:
2c88418c
RS
171 case SYMBOL_REF:
172 case LABEL_REF:
173 return 0;
174
175 case REG:
176 /* Note that we have to test for the actual rtx used for the frame
177 and arg pointers and not just the register number in case we have
178 eliminated the frame and/or arg pointer and are using it
179 for pseudos. */
c0fc376b 180 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
3335f1d9
JL
181 /* The arg pointer varies if it is not a fixed register. */
182 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
c0fc376b 183 return 0;
e38fe8e0
BS
184 if (x == pic_offset_table_rtx
185#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
186 /* ??? When call-clobbered, the value is stable modulo the restore
187 that must happen after a call. This currently screws up
188 local-alloc into believing that the restore is not needed, so we
189 must return 0 only if we are called from alias analysis. */
190 && for_alias
c0fc376b 191#endif
e38fe8e0
BS
192 )
193 return 0;
c0fc376b 194 return 1;
2c88418c
RS
195
196 case LO_SUM:
197 /* The operand 0 of a LO_SUM is considered constant
e7d96a83
JW
198 (in fact it is related specifically to operand 1)
199 during alias analysis. */
200 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
201 || rtx_varies_p (XEXP (x, 1), for_alias);
a6a2274a 202
ae0fb1b9
JW
203 case ASM_OPERANDS:
204 if (MEM_VOLATILE_P (x))
205 return 1;
206
5d3cc252 207 /* Fall through. */
ae0fb1b9 208
e9a25f70
JL
209 default:
210 break;
2c88418c
RS
211 }
212
213 fmt = GET_RTX_FORMAT (code);
214 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
215 if (fmt[i] == 'e')
9c82ac6b 216 {
e38fe8e0 217 if (rtx_varies_p (XEXP (x, i), for_alias))
9c82ac6b
JW
218 return 1;
219 }
220 else if (fmt[i] == 'E')
221 {
222 int j;
223 for (j = 0; j < XVECLEN (x, i); j++)
e38fe8e0 224 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
9c82ac6b
JW
225 return 1;
226 }
227
2c88418c
RS
228 return 0;
229}
230
231/* Return nonzero if the use of X as an address in a MEM can cause a trap. */
232
e38fe8e0 233int
0c20a65f 234rtx_addr_can_trap_p (rtx x)
2c88418c 235{
b3694847 236 enum rtx_code code = GET_CODE (x);
2c88418c
RS
237
238 switch (code)
239 {
240 case SYMBOL_REF:
ff0b6b99
FS
241 return SYMBOL_REF_WEAK (x);
242
2c88418c 243 case LABEL_REF:
2c88418c
RS
244 return 0;
245
246 case REG:
247 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
4f73495e
RH
248 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
249 || x == stack_pointer_rtx
250 /* The arg pointer varies if it is not a fixed register. */
251 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
252 return 0;
253 /* All of the virtual frame registers are stack references. */
254 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
255 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
256 return 0;
257 return 1;
2c88418c
RS
258
259 case CONST:
260 return rtx_addr_can_trap_p (XEXP (x, 0));
261
262 case PLUS:
263 /* An address is assumed not to trap if it is an address that can't
55efb413
JW
264 trap plus a constant integer or it is the pic register plus a
265 constant. */
266 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
267 && GET_CODE (XEXP (x, 1)) == CONST_INT)
268 || (XEXP (x, 0) == pic_offset_table_rtx
269 && CONSTANT_P (XEXP (x, 1))));
2c88418c
RS
270
271 case LO_SUM:
4f73495e 272 case PRE_MODIFY:
2c88418c 273 return rtx_addr_can_trap_p (XEXP (x, 1));
4f73495e
RH
274
275 case PRE_DEC:
276 case PRE_INC:
277 case POST_DEC:
278 case POST_INC:
279 case POST_MODIFY:
280 return rtx_addr_can_trap_p (XEXP (x, 0));
281
e9a25f70
JL
282 default:
283 break;
2c88418c
RS
284 }
285
286 /* If it isn't one of the cases above, it can cause a trap. */
287 return 1;
288}
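
/* Worked example (illustrative RTL, not from this file): for a frame
   access such as (mem (plus (reg frame_pointer) (const_int 8))), the
   PLUS case above recurses into the frame pointer, which cannot trap,
   and sees a CONST_INT offset, so the address is judged safe; a weak
   (symbol_ref "x") instead yields 1 via SYMBOL_REF_WEAK, since a weak
   symbol may resolve to address zero.  */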
289
4977bab6
ZW
290/* Return true if X is an address that is known to not be zero. */
291
292bool
0c20a65f 293nonzero_address_p (rtx x)
4977bab6
ZW
294{
295 enum rtx_code code = GET_CODE (x);
296
297 switch (code)
298 {
299 case SYMBOL_REF:
300 return !SYMBOL_REF_WEAK (x);
301
302 case LABEL_REF:
303 return true;
304
4977bab6
ZW
305 case REG:
306 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
307 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
308 || x == stack_pointer_rtx
309 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
310 return true;
311 /* All of the virtual frame registers are stack references. */
312 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
313 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
314 return true;
315 return false;
316
317 case CONST:
318 return nonzero_address_p (XEXP (x, 0));
319
320 case PLUS:
321 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
322 {
323 /* Pointers aren't allowed to wrap. If we've got a register
324 that is known to be a pointer, and a positive offset, then
325 the composite can't be zero. */
326 if (INTVAL (XEXP (x, 1)) > 0
327 && REG_P (XEXP (x, 0))
328 && REG_POINTER (XEXP (x, 0)))
329 return true;
330
331 return nonzero_address_p (XEXP (x, 0));
332 }
333 /* Handle PIC references. */
334 else if (XEXP (x, 0) == pic_offset_table_rtx
335 && CONSTANT_P (XEXP (x, 1)))
336 return true;
337 return false;
338
339 case PRE_MODIFY:
340 /* Similar to the above; allow positive offsets. Further, since
341 auto-inc is only allowed in memories, the register must be a
342 pointer. */
343 if (GET_CODE (XEXP (x, 1)) == CONST_INT
344 && INTVAL (XEXP (x, 1)) > 0)
345 return true;
346 return nonzero_address_p (XEXP (x, 0));
347
348 case PRE_INC:
349 /* Similarly. Further, the offset is always positive. */
350 return true;
351
352 case PRE_DEC:
353 case POST_DEC:
354 case POST_INC:
355 case POST_MODIFY:
356 return nonzero_address_p (XEXP (x, 0));
357
358 case LO_SUM:
359 return nonzero_address_p (XEXP (x, 1));
360
361 default:
362 break;
363 }
364
365 /* If it isn't one of the cases above, it might be zero. */
366 return false;
367}
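
/* Illustrative example for the PLUS case above:
   (plus (reg/f 100) (const_int 4)) is accepted as nonzero when
   REG_POINTER is set on the base register, because a valid pointer
   plus a positive offset cannot wrap to zero; with a negative offset
   the decision falls back to nonzero_address_p on the base alone.  */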
368
a6a2274a 369/* Return 1 if X refers to a memory location whose address
2c88418c 370 cannot be compared reliably with constant addresses,
a6a2274a 371 or if X refers to a BLKmode memory object.
e38fe8e0
BS
372 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
373 zero, we are slightly more conservative. */
2c88418c
RS
374
375int
0c20a65f 376rtx_addr_varies_p (rtx x, int for_alias)
2c88418c 377{
b3694847
SS
378 enum rtx_code code;
379 int i;
380 const char *fmt;
2c88418c
RS
381
382 if (x == 0)
383 return 0;
384
385 code = GET_CODE (x);
386 if (code == MEM)
e38fe8e0 387 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
2c88418c
RS
388
389 fmt = GET_RTX_FORMAT (code);
390 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
391 if (fmt[i] == 'e')
833c0b26 392 {
e38fe8e0 393 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
833c0b26
RK
394 return 1;
395 }
396 else if (fmt[i] == 'E')
397 {
398 int j;
399 for (j = 0; j < XVECLEN (x, i); j++)
e38fe8e0 400 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
833c0b26
RK
401 return 1;
402 }
2c88418c
RS
403 return 0;
404}
405\f
406/* Return the value of the integer term in X, if one is apparent;
407 otherwise return 0.
408 Only obvious integer terms are detected.
3ef42a0c 409 This is used in cse.c with the `related_value' field. */
2c88418c 410
c166a311 411HOST_WIDE_INT
0c20a65f 412get_integer_term (rtx x)
2c88418c
RS
413{
414 if (GET_CODE (x) == CONST)
415 x = XEXP (x, 0);
416
417 if (GET_CODE (x) == MINUS
418 && GET_CODE (XEXP (x, 1)) == CONST_INT)
419 return - INTVAL (XEXP (x, 1));
420 if (GET_CODE (x) == PLUS
421 && GET_CODE (XEXP (x, 1)) == CONST_INT)
422 return INTVAL (XEXP (x, 1));
423 return 0;
424}
425
426/* If X is a constant, return the value sans apparent integer term;
427 otherwise return 0.
428 Only obvious integer terms are detected. */
429
430rtx
0c20a65f 431get_related_value (rtx x)
2c88418c
RS
432{
433 if (GET_CODE (x) != CONST)
434 return 0;
435 x = XEXP (x, 0);
436 if (GET_CODE (x) == PLUS
437 && GET_CODE (XEXP (x, 1)) == CONST_INT)
438 return XEXP (x, 0);
439 else if (GET_CODE (x) == MINUS
440 && GET_CODE (XEXP (x, 1)) == CONST_INT)
441 return XEXP (x, 0);
442 return 0;
443}
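
/* Worked example (illustrative): for
     (const (plus (symbol_ref "x") (const_int 12)))
   get_integer_term returns 12 and get_related_value returns the
   (symbol_ref "x"); for an rtx with no obvious integer term, such as
   a bare REG, both functions return 0.  */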
444\f
d644189f
JW
445/* Given a tablejump insn INSN, return the RTL expression for the offset
446 into the jump table. If the offset cannot be determined, then return
447 NULL_RTX.
448
40f03658 449 If EARLIEST is nonzero, it is a pointer to a place where the earliest
d644189f
JW
450 insn used in locating the offset was found. */
451
452rtx
0c20a65f 453get_jump_table_offset (rtx insn, rtx *earliest)
d644189f 454{
6de9cd9a
DN
455 rtx label = NULL;
456 rtx table = NULL;
d644189f
JW
457 rtx set;
458 rtx old_insn;
459 rtx x;
460 rtx old_x;
461 rtx y;
462 rtx old_y;
463 int i;
d644189f 464
e1233a7d 465 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
d644189f
JW
466 return NULL_RTX;
467
468 x = SET_SRC (set);
469
470 /* Some targets (e.g., ARM) emit a tablejump that also
471 contains the out-of-range target. */
472 if (GET_CODE (x) == IF_THEN_ELSE
473 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
474 x = XEXP (x, 1);
475
476 /* Search backwards and locate the expression stored in X. */
f8cfc6aa 477 for (old_x = NULL_RTX; REG_P (x) && x != old_x;
d644189f
JW
478 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
479 ;
480
481 /* If X is an expression using a relative address then strip
482 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
483 or the jump table label. */
484 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
485 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
486 {
487 for (i = 0; i < 2; i++)
488 {
489 old_insn = insn;
490 y = XEXP (x, i);
491
492 if (y == pc_rtx || y == pic_offset_table_rtx)
493 break;
494
f8cfc6aa 495 for (old_y = NULL_RTX; REG_P (y) && y != old_y;
d644189f
JW
496 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
497 ;
498
499 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
500 break;
501 }
502
503 if (i >= 2)
504 return NULL_RTX;
505
506 x = XEXP (x, 1 - i);
507
f8cfc6aa 508 for (old_x = NULL_RTX; REG_P (x) && x != old_x;
d644189f
JW
509 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
510 ;
511 }
512
513 /* Strip off any sign or zero extension. */
514 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
515 {
516 x = XEXP (x, 0);
517
f8cfc6aa 518 for (old_x = NULL_RTX; REG_P (x) && x != old_x;
d644189f
JW
519 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
520 ;
521 }
522
523 /* If X isn't a MEM then this isn't a tablejump we understand. */
3c0cb5de 524 if (!MEM_P (x))
d644189f
JW
525 return NULL_RTX;
526
527 /* Strip off the MEM. */
528 x = XEXP (x, 0);
529
f8cfc6aa 530 for (old_x = NULL_RTX; REG_P (x) && x != old_x;
d644189f
JW
531 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
532 ;
533
534 /* If X isn't a PLUS then this isn't a tablejump we understand. */
535 if (GET_CODE (x) != PLUS)
536 return NULL_RTX;
537
538 /* At this point we should have an expression representing the jump table
539 plus an offset. Examine each operand in order to determine which one
540 represents the jump table. Knowing that tells us that the other operand
541 must represent the offset. */
542 for (i = 0; i < 2; i++)
543 {
544 old_insn = insn;
545 y = XEXP (x, i);
546
f8cfc6aa 547 for (old_y = NULL_RTX; REG_P (y) && y != old_y;
d644189f
JW
548 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
549 ;
550
551 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
552 && reg_mentioned_p (label, y))
553 break;
554 }
555
556 if (i >= 2)
557 return NULL_RTX;
558
559 x = XEXP (x, 1 - i);
560
561 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
562 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
563 for (i = 0; i < 2; i++)
564 if (XEXP (x, i) == pic_offset_table_rtx)
565 {
566 x = XEXP (x, 1 - i);
567 break;
568 }
569
570 if (earliest)
571 *earliest = insn;
572
573 /* Return the RTL expression representing the offset. */
574 return x;
575}
576\f
c14b9960
JW
577/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
578 a global register. */
579
580static int
0c20a65f 581global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
c14b9960
JW
582{
583 int regno;
584 rtx x = *loc;
585
586 if (! x)
587 return 0;
588
589 switch (GET_CODE (x))
590 {
591 case SUBREG:
f8cfc6aa 592 if (REG_P (SUBREG_REG (x)))
c14b9960
JW
593 {
594 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
595 && global_regs[subreg_regno (x)])
596 return 1;
597 return 0;
598 }
599 break;
600
601 case REG:
602 regno = REGNO (x);
603 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
604 return 1;
605 return 0;
606
607 case SCRATCH:
608 case PC:
609 case CC0:
610 case CONST_INT:
611 case CONST_DOUBLE:
612 case CONST:
613 case LABEL_REF:
614 return 0;
615
616 case CALL:
617 /* A non-constant call might use a global register. */
618 return 1;
619
620 default:
621 break;
622 }
623
624 return 0;
625}
626
40f03658 627/* Returns nonzero if X mentions a global register. */
c14b9960
JW
628
629int
0c20a65f 630global_reg_mentioned_p (rtx x)
c14b9960 631{
c14b9960
JW
632 if (INSN_P (x))
633 {
4b4bf941 634 if (CALL_P (x))
c14b9960
JW
635 {
636 if (! CONST_OR_PURE_CALL_P (x))
637 return 1;
638 x = CALL_INSN_FUNCTION_USAGE (x);
639 if (x == 0)
640 return 0;
a6a2274a 641 }
c14b9960 642 else
a6a2274a 643 x = PATTERN (x);
c14b9960
JW
644 }
645
646 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
647}
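
/* Illustrative sketch: global_reg_mentioned_p_1 above follows the
   for_each_rtx callback convention -- a nonzero return stops the walk
   and becomes for_each_rtx's return value.  A hypothetical query for a
   single hard register would use the same shape:  */

static int
mentions_hard_reg_1 (rtx *loc, void *data)
{
  unsigned int regno = *(unsigned int *) data;

  return *loc != 0 && REG_P (*loc) && REGNO (*loc) == regno;
}

/* Caller sketch: for_each_rtx (&pat, mentions_hard_reg_1, &regno);  */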
648\f
4b983fdc
RH
649/* Return the number of places FIND appears within X. If COUNT_DEST is
650 zero, we do not count occurrences inside the destination of a SET. */
651
652int
0c20a65f 653count_occurrences (rtx x, rtx find, int count_dest)
4b983fdc
RH
654{
655 int i, j;
656 enum rtx_code code;
657 const char *format_ptr;
658 int count;
659
660 if (x == find)
661 return 1;
662
663 code = GET_CODE (x);
664
665 switch (code)
666 {
667 case REG:
668 case CONST_INT:
669 case CONST_DOUBLE:
69ef87e2 670 case CONST_VECTOR:
4b983fdc
RH
671 case SYMBOL_REF:
672 case CODE_LABEL:
673 case PC:
674 case CC0:
675 return 0;
676
677 case MEM:
3c0cb5de 678 if (MEM_P (find) && rtx_equal_p (x, find))
4b983fdc
RH
679 return 1;
680 break;
681
682 case SET:
683 if (SET_DEST (x) == find && ! count_dest)
684 return count_occurrences (SET_SRC (x), find, count_dest);
685 break;
686
687 default:
688 break;
689 }
690
691 format_ptr = GET_RTX_FORMAT (code);
692 count = 0;
693
694 for (i = 0; i < GET_RTX_LENGTH (code); i++)
695 {
696 switch (*format_ptr++)
697 {
698 case 'e':
699 count += count_occurrences (XEXP (x, i), find, count_dest);
700 break;
701
702 case 'E':
703 for (j = 0; j < XVECLEN (x, i); j++)
704 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
705 break;
706 }
707 }
708 return count;
709}
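
/* Usage sketch (hypothetical helper, not a GCC function): counting the
   reads of REG on the right-hand side of a single-set insn.  Scanning
   SET_SRC directly means the destination is never visited, so the
   COUNT_DEST argument is moot here.  */

static int
count_reg_reads (rtx insn, rtx reg)
{
  rtx set = single_set (insn);

  return set ? count_occurrences (SET_SRC (set), reg, 0) : 0;
}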
710\f
2c88418c
RS
711/* Nonzero if register REG appears somewhere within IN.
712 Also works if REG is not a register; in this case it checks
713 for a subexpression of IN that is Lisp "equal" to REG. */
714
715int
0c20a65f 716reg_mentioned_p (rtx reg, rtx in)
2c88418c 717{
b3694847
SS
718 const char *fmt;
719 int i;
720 enum rtx_code code;
2c88418c
RS
721
722 if (in == 0)
723 return 0;
724
725 if (reg == in)
726 return 1;
727
728 if (GET_CODE (in) == LABEL_REF)
729 return reg == XEXP (in, 0);
730
731 code = GET_CODE (in);
732
733 switch (code)
734 {
735 /* Compare registers by number. */
736 case REG:
f8cfc6aa 737 return REG_P (reg) && REGNO (in) == REGNO (reg);
2c88418c
RS
738
739 /* These codes have no constituent expressions
740 and are unique. */
741 case SCRATCH:
742 case CC0:
743 case PC:
744 return 0;
745
746 case CONST_INT:
69ef87e2 747 case CONST_VECTOR:
2c88418c
RS
748 case CONST_DOUBLE:
749 /* These are kept unique for a given value. */
750 return 0;
a6a2274a 751
e9a25f70
JL
752 default:
753 break;
2c88418c
RS
754 }
755
756 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
757 return 1;
758
759 fmt = GET_RTX_FORMAT (code);
760
761 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
762 {
763 if (fmt[i] == 'E')
764 {
b3694847 765 int j;
2c88418c
RS
766 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
767 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
768 return 1;
769 }
770 else if (fmt[i] == 'e'
771 && reg_mentioned_p (reg, XEXP (in, i)))
772 return 1;
773 }
774 return 0;
775}
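
/* Illustrative example: registers are compared by number above, so with
   pat = (set (reg 100) (plus (reg 101) (const_int 1))),
   reg_mentioned_p (gen_rtx_REG (SImode, 101), pat) returns 1 even
   though the REG passed in is not the identical rtx object.  */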
776\f
777/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
778 no CODE_LABEL insn. */
779
780int
0c20a65f 781no_labels_between_p (rtx beg, rtx end)
2c88418c 782{
b3694847 783 rtx p;
978f547f
JH
784 if (beg == end)
785 return 0;
2c88418c 786 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
4b4bf941 787 if (LABEL_P (p))
2c88418c
RS
788 return 0;
789 return 1;
790}
791
3ec2b590
R
792/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
793 no JUMP_INSN insn. */
794
795int
0c20a65f 796no_jumps_between_p (rtx beg, rtx end)
3ec2b590 797{
b3694847 798 rtx p;
3ec2b590 799 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
4b4bf941 800 if (JUMP_P (p))
3ec2b590
R
801 return 0;
802 return 1;
803}
804
2c88418c
RS
805/* Nonzero if register REG is used in an insn between
806 FROM_INSN and TO_INSN (exclusive of those two). */
807
808int
0c20a65f 809reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
2c88418c 810{
b3694847 811 rtx insn;
2c88418c
RS
812
813 if (from_insn == to_insn)
814 return 0;
815
816 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
2c3c49de 817 if (INSN_P (insn)
8f3e7a26 818 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
4b4bf941 819 || (CALL_P (insn)
8f3e7a26
RK
820 && (find_reg_fusage (insn, USE, reg)
821 || find_reg_fusage (insn, CLOBBER, reg)))))
2c88418c
RS
822 return 1;
823 return 0;
824}
825\f
826/* Nonzero if the old value of X, a register, is referenced in BODY. If X
827 is entirely replaced by a new value and the only use is as a SET_DEST,
828 we do not consider it a reference. */
829
830int
0c20a65f 831reg_referenced_p (rtx x, rtx body)
2c88418c
RS
832{
833 int i;
834
835 switch (GET_CODE (body))
836 {
837 case SET:
838 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
839 return 1;
840
841 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
842 of a REG that occupies all of the REG, the insn references X if
843 it is mentioned in the destination. */
844 if (GET_CODE (SET_DEST (body)) != CC0
845 && GET_CODE (SET_DEST (body)) != PC
f8cfc6aa 846 && !REG_P (SET_DEST (body))
2c88418c 847 && ! (GET_CODE (SET_DEST (body)) == SUBREG
f8cfc6aa 848 && REG_P (SUBREG_REG (SET_DEST (body)))
2c88418c
RS
849 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
850 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
851 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
852 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
853 && reg_overlap_mentioned_p (x, SET_DEST (body)))
854 return 1;
e9a25f70 855 return 0;
2c88418c
RS
856
857 case ASM_OPERANDS:
858 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
859 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
860 return 1;
e9a25f70 861 return 0;
2c88418c
RS
862
863 case CALL:
864 case USE:
14a774a9 865 case IF_THEN_ELSE:
2c88418c
RS
866 return reg_overlap_mentioned_p (x, body);
867
868 case TRAP_IF:
869 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
870
21b8482a
JJ
871 case PREFETCH:
872 return reg_overlap_mentioned_p (x, XEXP (body, 0));
873
2ac4fed0
RK
874 case UNSPEC:
875 case UNSPEC_VOLATILE:
2f9fb4c2
R
876 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
877 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
878 return 1;
879 return 0;
880
2c88418c
RS
881 case PARALLEL:
882 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
883 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
884 return 1;
e9a25f70 885 return 0;
a6a2274a 886
0d3ffb5a 887 case CLOBBER:
3c0cb5de 888 if (MEM_P (XEXP (body, 0)))
0d3ffb5a
GK
889 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
890 return 1;
891 return 0;
892
0c99ec5c
RH
893 case COND_EXEC:
894 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
895 return 1;
896 return reg_referenced_p (x, COND_EXEC_CODE (body));
897
e9a25f70
JL
898 default:
899 return 0;
2c88418c 900 }
2c88418c
RS
901}
902
903/* Nonzero if register REG is referenced in an insn between
904 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
0f41302f 905 not count. */
2c88418c
RS
906
907int
0c20a65f 908reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
2c88418c 909{
b3694847 910 rtx insn;
2c88418c
RS
911
912 if (from_insn == to_insn)
913 return 0;
914
915 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
2c3c49de 916 if (INSN_P (insn)
8f3e7a26 917 && (reg_referenced_p (reg, PATTERN (insn))
4b4bf941 918 || (CALL_P (insn)
8f3e7a26 919 && find_reg_fusage (insn, USE, reg))))
2c88418c
RS
920 return 1;
921 return 0;
922}
923\f
924/* Nonzero if register REG is set or clobbered in an insn between
925 FROM_INSN and TO_INSN (exclusive of those two). */
926
927int
0c20a65f 928reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
2c88418c 929{
b3694847 930 rtx insn;
2c88418c
RS
931
932 if (from_insn == to_insn)
933 return 0;
934
935 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
2c3c49de 936 if (INSN_P (insn) && reg_set_p (reg, insn))
2c88418c
RS
937 return 1;
938 return 0;
939}
940
941/* Internals of reg_set_between_p. */
2c88418c 942int
0c20a65f 943reg_set_p (rtx reg, rtx insn)
2c88418c 944{
2c88418c
RS
945 /* We can be passed an insn or part of one. If we are passed an insn,
946 check if a side-effect of the insn clobbers REG. */
4977bab6
ZW
947 if (INSN_P (insn)
948 && (FIND_REG_INC_NOTE (insn, reg)
4b4bf941 949 || (CALL_P (insn)
2c88418c
RS
950 /* We'd like to test call_used_regs here, but rtlanal.c can't
951 reference that variable due to its use in genattrtab. So
8f3e7a26
RK
952 we'll just be more conservative.
953
954 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
955 information holds all clobbered registers. */
f8cfc6aa 956 && ((REG_P (reg)
2c88418c 957 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3c0cb5de 958 || MEM_P (reg)
4977bab6
ZW
959 || find_reg_fusage (insn, CLOBBER, reg)))))
960 return 1;
2c88418c 961
91b2d119 962 return set_of (reg, insn) != NULL_RTX;
2c88418c
RS
963}
964
a2e1a0bf
RH
965/* Similar to reg_set_between_p, but check all registers in X. Return 0
966 only if none of them are modified between START and END. Do not
967 consider non-registers one way or the other. */
968
969int
0c20a65f 970regs_set_between_p (rtx x, rtx start, rtx end)
a2e1a0bf
RH
971{
972 enum rtx_code code = GET_CODE (x);
6f7d635c 973 const char *fmt;
a2e1a0bf
RH
974 int i, j;
975
976 switch (code)
977 {
978 case CONST_INT:
979 case CONST_DOUBLE:
69ef87e2 980 case CONST_VECTOR:
a2e1a0bf
RH
981 case CONST:
982 case SYMBOL_REF:
983 case LABEL_REF:
984 case PC:
985 case CC0:
986 return 0;
987
988 case REG:
989 return reg_set_between_p (x, start, end);
a6a2274a 990
a2e1a0bf
RH
991 default:
992 break;
993 }
994
995 fmt = GET_RTX_FORMAT (code);
996 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
997 {
998 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
999 return 1;
1000
1001 else if (fmt[i] == 'E')
1002 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1003 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1004 return 1;
1005 }
1006
1007 return 0;
1008}
1009
2c88418c
RS
1010/* Similar to reg_set_between_p, but check all registers in X. Return 0
1011 only if none of them are modified between START and END. Return 1 if
7b52eede 1012 X contains a MEM; this routine does use memory aliasing. */
2c88418c
RS
1013
1014int
0c20a65f 1015modified_between_p (rtx x, rtx start, rtx end)
2c88418c
RS
1016{
1017 enum rtx_code code = GET_CODE (x);
6f7d635c 1018 const char *fmt;
f8163c92 1019 int i, j;
7b52eede
JH
1020 rtx insn;
1021
1022 if (start == end)
1023 return 0;
2c88418c
RS
1024
1025 switch (code)
1026 {
1027 case CONST_INT:
1028 case CONST_DOUBLE:
69ef87e2 1029 case CONST_VECTOR:
2c88418c
RS
1030 case CONST:
1031 case SYMBOL_REF:
1032 case LABEL_REF:
1033 return 0;
1034
1035 case PC:
1036 case CC0:
1037 return 1;
1038
1039 case MEM:
7b52eede
JH
1040 if (RTX_UNCHANGING_P (x))
1041 return 0;
1042 if (modified_between_p (XEXP (x, 0), start, end))
2c88418c 1043 return 1;
7b52eede
JH
1044 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1045 if (memory_modified_in_insn_p (x, insn))
1046 return 1;
1047 return 0;
2c88418c
RS
1048 break;
1049
1050 case REG:
1051 return reg_set_between_p (x, start, end);
a6a2274a 1052
e9a25f70
JL
1053 default:
1054 break;
2c88418c
RS
1055 }
1056
1057 fmt = GET_RTX_FORMAT (code);
1058 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
f8163c92
RK
1059 {
1060 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1061 return 1;
1062
d4757e6a 1063 else if (fmt[i] == 'E')
f8163c92
RK
1064 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1065 if (modified_between_p (XVECEXP (x, i, j), start, end))
1066 return 1;
1067 }
1068
1069 return 0;
1070}
1071
1072/* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1073 of them are modified in INSN. Return 1 if X contains a MEM; this routine
7b52eede 1074 does use memory aliasing. */
f8163c92
RK
1075
1076int
0c20a65f 1077modified_in_p (rtx x, rtx insn)
f8163c92
RK
1078{
1079 enum rtx_code code = GET_CODE (x);
6f7d635c 1080 const char *fmt;
f8163c92
RK
1081 int i, j;
1082
1083 switch (code)
1084 {
1085 case CONST_INT:
1086 case CONST_DOUBLE:
69ef87e2 1087 case CONST_VECTOR:
f8163c92
RK
1088 case CONST:
1089 case SYMBOL_REF:
1090 case LABEL_REF:
1091 return 0;
1092
1093 case PC:
1094 case CC0:
2c88418c
RS
1095 return 1;
1096
f8163c92 1097 case MEM:
7b52eede
JH
1098 if (RTX_UNCHANGING_P (x))
1099 return 0;
1100 if (modified_in_p (XEXP (x, 0), insn))
f8163c92 1101 return 1;
7b52eede
JH
1102 if (memory_modified_in_insn_p (x, insn))
1103 return 1;
1104 return 0;
f8163c92
RK
1105 break;
1106
1107 case REG:
1108 return reg_set_p (x, insn);
e9a25f70
JL
1109
1110 default:
1111 break;
f8163c92
RK
1112 }
1113
1114 fmt = GET_RTX_FORMAT (code);
1115 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1116 {
1117 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1118 return 1;
1119
d4757e6a 1120 else if (fmt[i] == 'E')
f8163c92
RK
1121 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1122 if (modified_in_p (XVECEXP (x, i, j), insn))
1123 return 1;
1124 }
1125
2c88418c
RS
1126 return 0;
1127}
a8393d5d 1128
4eb00163 1129/* Return true if anything in insn X is anti-, output- or true-dependent on
a8393d5d
RH
1130 anything in insn Y. */
1131
1132int
0c20a65f 1133insn_dependent_p (rtx x, rtx y)
a8393d5d
RH
1134{
1135 rtx tmp;
1136
1137 if (! INSN_P (x) || ! INSN_P (y))
1138 abort ();
1139
1140 tmp = PATTERN (y);
4eb00163 1141 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
a8393d5d
RH
1142 if (tmp == NULL_RTX)
1143 return 1;
1144
1145 tmp = PATTERN (x);
4eb00163 1146 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
a8393d5d
RH
1147 if (tmp == NULL_RTX)
1148 return 1;
1149
1150 return 0;
1151}
1152
4eb00163 1153/* A helper routine for insn_dependent_p called through note_stores. */
a8393d5d
RH
1154
1155static void
0c20a65f 1156insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
a8393d5d
RH
1157{
1158 rtx * pinsn = (rtx *) data;
1159
1160 if (*pinsn && reg_mentioned_p (x, *pinsn))
1161 *pinsn = NULL_RTX;
1162}
2c88418c 1163\f
91b2d119
JH
1164/* Helper function for set_of. */
1165struct set_of_data
1166 {
1167 rtx found;
1168 rtx pat;
1169 };
1170
1171static void
0c20a65f 1172set_of_1 (rtx x, rtx pat, void *data1)
91b2d119
JH
1173{
1174 struct set_of_data *data = (struct set_of_data *) (data1);
1175 if (rtx_equal_p (x, data->pat)
3c0cb5de 1176 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
91b2d119
JH
1177 data->found = pat;
1178}
1179
1180/* Given an INSN, return a SET or CLOBBER expression that modifies PAT
eaec9b3d 1181 (either directly or via STRICT_LOW_PART and similar modifiers). */
91b2d119 1182rtx
0c20a65f 1183set_of (rtx pat, rtx insn)
91b2d119
JH
1184{
1185 struct set_of_data data;
1186 data.found = NULL_RTX;
1187 data.pat = pat;
1188 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1189 return data.found;
1190}
1191\f
2c88418c
RS
1192/* Given an INSN, return a SET expression if this insn has only a single SET.
1193 It may also have CLOBBERs, USEs, or SETs whose output
1194 will not be used, which we ignore. */
1195
1196rtx
0c20a65f 1197single_set_2 (rtx insn, rtx pat)
2c88418c 1198{
c9b89a21
JH
1199 rtx set = NULL;
1200 int set_verified = 1;
2c88418c 1201 int i;
c9b89a21 1202
b1cdafbb 1203 if (GET_CODE (pat) == PARALLEL)
2c88418c 1204 {
c9b89a21 1205 for (i = 0; i < XVECLEN (pat, 0); i++)
b1cdafbb 1206 {
c9b89a21
JH
1207 rtx sub = XVECEXP (pat, 0, i);
1208 switch (GET_CODE (sub))
1209 {
1210 case USE:
1211 case CLOBBER:
1212 break;
1213
1214 case SET:
1215 /* We can consider insns having multiple sets, where all
1216 but one are dead, as single set insns. In the common case
1217 only a single set is present in the pattern, so we want
f63d1bf7 1218 to avoid checking for REG_UNUSED notes unless necessary.
c9b89a21
JH
1219
1220 When we reach a set for the first time, we just assume it is
1221 the single set we are looking for; only when more
1222 sets are found in the insn do we check them. */
1223 if (!set_verified)
1224 {
1225 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1226 && !side_effects_p (set))
1227 set = NULL;
1228 else
1229 set_verified = 1;
1230 }
1231 if (!set)
1232 set = sub, set_verified = 0;
1233 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1234 || side_effects_p (sub))
1235 return NULL_RTX;
1236 break;
1237
1238 default:
1239 return NULL_RTX;
1240 }
787ccee0 1241 }
2c88418c 1242 }
c9b89a21 1243 return set;
2c88418c 1244}
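
/* Worked example (illustrative): for
     (parallel [(set (reg 100) (reg 101))
                (clobber (reg 102))])
   single_set returns the inner SET, since USEs and CLOBBERs are skipped
   above; a PARALLEL with two SETs whose destinations both lack
   REG_UNUSED notes makes it return NULL_RTX instead.  */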
941c63ac
JL
1245
1246/* Given an INSN, return nonzero if it has more than one SET, else return
1247 zero. */
1248
5f7d3786 1249int
0c20a65f 1250multiple_sets (rtx insn)
941c63ac 1251{
cae8acdd 1252 int found;
941c63ac 1253 int i;
a6a2274a 1254
941c63ac 1255 /* INSN must be an insn. */
2c3c49de 1256 if (! INSN_P (insn))
941c63ac
JL
1257 return 0;
1258
1259 /* Only a PARALLEL can have multiple SETs. */
1260 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1261 {
1262 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1263 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1264 {
1265 /* If we have already found a SET, then return now. */
1266 if (found)
1267 return 1;
1268 else
1269 found = 1;
1270 }
1271 }
a6a2274a 1272
941c63ac
JL
1273 /* Either zero or one SET. */
1274 return 0;
1275}
2c88418c 1276\f
7142e318
JW
1277/* Return nonzero if the destination of SET equals the source
1278 and there are no side effects. */
1279
1280int
0c20a65f 1281set_noop_p (rtx set)
7142e318
JW
1282{
1283 rtx src = SET_SRC (set);
1284 rtx dst = SET_DEST (set);
1285
371b8fc0
JH
1286 if (dst == pc_rtx && src == pc_rtx)
1287 return 1;
1288
3c0cb5de 1289 if (MEM_P (dst) && MEM_P (src))
cd648cec
JH
1290 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1291
7142e318
JW
1292 if (GET_CODE (dst) == SIGN_EXTRACT
1293 || GET_CODE (dst) == ZERO_EXTRACT)
1294 return rtx_equal_p (XEXP (dst, 0), src)
cd648cec
JH
1295 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1296 && !side_effects_p (src);
7142e318
JW
1297
1298 if (GET_CODE (dst) == STRICT_LOW_PART)
1299 dst = XEXP (dst, 0);
1300
1301 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1302 {
1303 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1304 return 0;
1305 src = SUBREG_REG (src);
1306 dst = SUBREG_REG (dst);
1307 }
1308
f8cfc6aa 1309 return (REG_P (src) && REG_P (dst)
7142e318
JW
1310 && REGNO (src) == REGNO (dst));
1311}
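
/* Illustrative examples: (set (reg 100) (reg 100)) is a no-op, as is
   (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 100) 0)), where
   the SUBREG_BYTEs match and the inner registers coincide; but
   (set (reg 100) (reg 101)) is not, since the register numbers
   differ.  */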
0005550b
JH
1312\f
1313/* Return nonzero if an insn consists only of SETs, each of which only sets a
1314 value to itself. */
1315
1316int
0c20a65f 1317noop_move_p (rtx insn)
0005550b
JH
1318{
1319 rtx pat = PATTERN (insn);
1320
b5832b43
JH
1321 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1322 return 1;
1323
0005550b
JH
1324 /* Insns carrying these notes are useful later on. */
1325 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1326 return 0;
1327
eb9d8e4d
JW
1328 /* For now treat an insn with a REG_RETVAL note as a
1329 special insn which should not be considered a no-op. */
1330 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1331 return 0;
1332
0005550b
JH
1333 if (GET_CODE (pat) == SET && set_noop_p (pat))
1334 return 1;
1335
1336 if (GET_CODE (pat) == PARALLEL)
1337 {
1338 int i;
1339 /* If nothing but SETs of registers to themselves,
1340 this insn can also be deleted. */
1341 for (i = 0; i < XVECLEN (pat, 0); i++)
1342 {
1343 rtx tem = XVECEXP (pat, 0, i);
1344
1345 if (GET_CODE (tem) == USE
1346 || GET_CODE (tem) == CLOBBER)
1347 continue;
1348
1349 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1350 return 0;
1351 }
1352
1353 return 1;
1354 }
1355 return 0;
1356}
1357\f
7142e318 1358
63be01fb
JW
1359/* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1360 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1361 If the object was modified, if we hit a partial assignment to X, or hit a
1362 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1363 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1364 be the src. */
2c88418c
RS
1365
1366rtx
0c20a65f 1367find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
2c88418c
RS
1368{
1369 rtx p;
1370
4b4bf941 1371 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
2c88418c 1372 p = PREV_INSN (p))
2c3c49de 1373 if (INSN_P (p))
2c88418c
RS
1374 {
1375 rtx set = single_set (p);
c166a311 1376 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
2c88418c
RS
1377
1378 if (set && rtx_equal_p (x, SET_DEST (set)))
1379 {
1380 rtx src = SET_SRC (set);
1381
1382 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1383 src = XEXP (note, 0);
1384
63be01fb
JW
1385 if ((valid_to == NULL_RTX
1386 || ! modified_between_p (src, PREV_INSN (p), valid_to))
2c88418c
RS
1387 /* Reject hard registers because we don't usually want
1388 to use them; we'd rather use a pseudo. */
f8cfc6aa 1389 && (! (REG_P (src)
89d3d442 1390 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
2c88418c
RS
1391 {
1392 *pinsn = p;
1393 return src;
1394 }
1395 }
a6a2274a 1396
2c88418c
RS
1397 /* If set in non-simple way, we don't have a value. */
1398 if (reg_set_p (x, p))
1399 break;
1400 }
1401
1402 return x;
a6a2274a 1403}
2c88418c
RS
1404\f
1405/* Return nonzero if register in range [REGNO, ENDREGNO)
1406 appears either explicitly or implicitly in X
1407 other than being stored into.
1408
1409 References contained within the substructure at LOC do not count.
1410 LOC may be zero, meaning don't ignore anything. */
1411
1412int
0c20a65f
AJ
1413refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1414 rtx *loc)
2c88418c 1415{
770ae6cc
RK
1416 int i;
1417 unsigned int x_regno;
1418 RTX_CODE code;
1419 const char *fmt;
2c88418c
RS
1420
1421 repeat:
1422 /* The contents of a REG_NONNEG note are always zero, so we must come here
1423 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1424 if (x == 0)
1425 return 0;
1426
1427 code = GET_CODE (x);
1428
1429 switch (code)
1430 {
1431 case REG:
770ae6cc 1432 x_regno = REGNO (x);
f8163c92
RK
1433
1434 /* If we are modifying the stack, frame, or argument pointer, it will
1435 clobber a virtual register. In fact, we could be more precise,
1436 but it isn't worth it. */
770ae6cc 1437 if ((x_regno == STACK_POINTER_REGNUM
f8163c92 1438#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
770ae6cc 1439 || x_regno == ARG_POINTER_REGNUM
f8163c92 1440#endif
770ae6cc 1441 || x_regno == FRAME_POINTER_REGNUM)
f8163c92
RK
1442 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1443 return 1;
1444
770ae6cc 1445 return (endregno > x_regno
a6a2274a 1446 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
66fd46b6 1447 ? hard_regno_nregs[x_regno][GET_MODE (x)]
2c88418c
RS
1448 : 1));
1449
1450 case SUBREG:
1451 /* If this is a SUBREG of a hard reg, we can see exactly which
1452 registers are being modified. Otherwise, handle normally. */
f8cfc6aa 1453 if (REG_P (SUBREG_REG (x))
2c88418c
RS
1454 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1455 {
ddef6bc7 1456 unsigned int inner_regno = subreg_regno (x);
770ae6cc 1457 unsigned int inner_endregno
403c659c
DE
1458 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1459 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
2c88418c
RS
1460
1461 return endregno > inner_regno && regno < inner_endregno;
1462 }
1463 break;
1464
1465 case CLOBBER:
1466 case SET:
1467 if (&SET_DEST (x) != loc
1468 /* Note setting a SUBREG counts as referring to the REG it is in for
1469 a pseudo but not for hard registers since we can
1470 treat each word individually. */
1471 && ((GET_CODE (SET_DEST (x)) == SUBREG
1472 && loc != &SUBREG_REG (SET_DEST (x))
f8cfc6aa 1473 && REG_P (SUBREG_REG (SET_DEST (x)))
2c88418c
RS
1474 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1475 && refers_to_regno_p (regno, endregno,
1476 SUBREG_REG (SET_DEST (x)), loc))
f8cfc6aa 1477 || (!REG_P (SET_DEST (x))
2c88418c
RS
1478 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1479 return 1;
1480
1481 if (code == CLOBBER || loc == &SET_SRC (x))
1482 return 0;
1483 x = SET_SRC (x);
1484 goto repeat;
e9a25f70
JL
1485
1486 default:
1487 break;
2c88418c
RS
1488 }
1489
1490 /* X does not match, so try its subexpressions. */
1491
1492 fmt = GET_RTX_FORMAT (code);
1493 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1494 {
1495 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1496 {
1497 if (i == 0)
1498 {
1499 x = XEXP (x, 0);
1500 goto repeat;
1501 }
1502 else
1503 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1504 return 1;
1505 }
1506 else if (fmt[i] == 'E')
1507 {
b3694847 1508 int j;
6a87d634 1509 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2c88418c
RS
1510 if (loc != &XVECEXP (x, i, j)
1511 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1512 return 1;
1513 }
1514 }
1515 return 0;
1516}
1517
1518/* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1519 we check if any register number in X conflicts with the relevant register
1520 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1521 contains a MEM (we don't bother checking for memory addresses that can't
1522 conflict because we expect this to be a rare case). */
1523
1524int
0c20a65f 1525reg_overlap_mentioned_p (rtx x, rtx in)
2c88418c 1526{
770ae6cc 1527 unsigned int regno, endregno;
2c88418c 1528
6f626d1b
PB
1529 /* If either argument is a constant, then modifying X can not
1530 affect IN. Here we look at IN, we can profitably combine
1531 CONSTANT_P (x) with the switch statement below. */
1532 if (CONSTANT_P (in))
b98b49ac 1533 return 0;
0c99ec5c 1534
6f626d1b 1535 recurse:
0c99ec5c 1536 switch (GET_CODE (x))
2c88418c 1537 {
6f626d1b
PB
1538 case STRICT_LOW_PART:
1539 case ZERO_EXTRACT:
1540 case SIGN_EXTRACT:
1541 /* Overly conservative. */
1542 x = XEXP (x, 0);
1543 goto recurse;
1544
0c99ec5c 1545 case SUBREG:
2c88418c
RS
1546 regno = REGNO (SUBREG_REG (x));
1547 if (regno < FIRST_PSEUDO_REGISTER)
ddef6bc7 1548 regno = subreg_regno (x);
0c99ec5c 1549 goto do_reg;
2c88418c 1550
0c99ec5c
RH
1551 case REG:
1552 regno = REGNO (x);
1553 do_reg:
1554 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
66fd46b6 1555 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
8e2e89f7 1556 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
2c88418c 1557
0c99ec5c
RH
1558 case MEM:
1559 {
1560 const char *fmt;
1561 int i;
2c88418c 1562
3c0cb5de 1563 if (MEM_P (in))
2c88418c
RS
1564 return 1;
1565
0c99ec5c
RH
1566 fmt = GET_RTX_FORMAT (GET_CODE (in));
1567 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1568 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1569 return 1;
c0222c21 1570
0c99ec5c
RH
1571 return 0;
1572 }
1573
1574 case SCRATCH:
1575 case PC:
1576 case CC0:
1577 return reg_mentioned_p (x, in);
1578
1579 case PARALLEL:
37ceff9d 1580 {
90d036a0 1581 int i;
37ceff9d
RH
1582
1583 /* If any register in here refers to it we return true. */
7193d1dc
RK
1584 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1585 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1586 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
6f626d1b 1587 return 1;
7193d1dc 1588 return 0;
37ceff9d 1589 }
2c88418c 1590
0c99ec5c 1591 default:
6f626d1b
PB
1592#ifdef ENABLE_CHECKING
1593 if (!CONSTANT_P (x))
1594 abort ();
1595#endif
2c88418c 1596
6f626d1b
PB
1597 return 0;
1598 }
2c88418c
RS
1599}
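
/* Worked example (illustrative): on a target where a DImode value in
   hard register 0 also occupies hard register 1, calling
   reg_overlap_mentioned_p with x = (reg:SI 1) and in = (reg:DI 0)
   returns 1: the REGNO values differ, but the ranges [1, 2) and
   [0, 2) computed from hard_regno_nregs overlap.  */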
1600\f
2c88418c
RS
1601/* Call FUN on each register or MEM that is stored into or clobbered by X.
1602 (X would be the pattern of an insn).
1603 FUN receives two arguments:
1604 the REG, MEM, CC0 or PC being stored in or clobbered,
1605 the SET or CLOBBER rtx that does the store.
1606
1607 If the item being stored in or clobbered is a SUBREG of a hard register,
1608 the SUBREG will be passed. */
a6a2274a 1609
2c88418c 1610void
0c20a65f 1611note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
2c88418c 1612{
90d036a0
RK
1613 int i;
1614
0c99ec5c
RH
1615 if (GET_CODE (x) == COND_EXEC)
1616 x = COND_EXEC_CODE (x);
90d036a0 1617
0c99ec5c 1618 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2c88418c 1619 {
b3694847 1620 rtx dest = SET_DEST (x);
90d036a0 1621
2c88418c 1622 while ((GET_CODE (dest) == SUBREG
f8cfc6aa 1623 && (!REG_P (SUBREG_REG (dest))
2c88418c
RS
1624 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1625 || GET_CODE (dest) == ZERO_EXTRACT
1626 || GET_CODE (dest) == SIGN_EXTRACT
1627 || GET_CODE (dest) == STRICT_LOW_PART)
1628 dest = XEXP (dest, 0);
86465af7 1629
7193d1dc 1630 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
629111c7 1631 each of whose first operand is a register. */
7193d1dc
RK
1632 if (GET_CODE (dest) == PARALLEL)
1633 {
1634 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1635 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
629111c7 1636 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
7193d1dc 1637 }
86465af7 1638 else
84832317 1639 (*fun) (dest, x, data);
2c88418c 1640 }
770ae6cc 1641
90d036a0
RK
1642 else if (GET_CODE (x) == PARALLEL)
1643 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1644 note_stores (XVECEXP (x, 0, i), fun, data);
2c88418c
RS
1645}
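
/* Usage sketch: the canonical note_stores callback shape, as used by
   set_of_1 and insn_dependent_p_1 above.  The helper below is
   hypothetical; it records every register stored into by an insn in a
   regset passed through DATA.  */

static void
record_stored_reg (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  if (REG_P (dest))
    SET_REGNO_REG_SET ((regset) data, REGNO (dest));
}

/* Caller sketch: note_stores (PATTERN (insn), record_stored_reg, set);  */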
1646\f
e2373f95
RK
1647/* Like note_stores, but call FUN for each expression that is being
1648 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1649 FUN for each expression, not any interior subexpressions. FUN receives a
1650 pointer to the expression and the DATA passed to this function.
1651
1652 Note that this is not quite the same test as that done in reg_referenced_p
1653 since that considers something as being referenced if it is being
1654 partially set, while we do not. */
1655
1656void
0c20a65f 1657note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
e2373f95
RK
1658{
1659 rtx body = *pbody;
1660 int i;
1661
1662 switch (GET_CODE (body))
1663 {
1664 case COND_EXEC:
1665 (*fun) (&COND_EXEC_TEST (body), data);
1666 note_uses (&COND_EXEC_CODE (body), fun, data);
1667 return;
1668
1669 case PARALLEL:
1670 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1671 note_uses (&XVECEXP (body, 0, i), fun, data);
1672 return;
1673
1674 case USE:
1675 (*fun) (&XEXP (body, 0), data);
1676 return;
1677
1678 case ASM_OPERANDS:
1679 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1680 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1681 return;
1682
1683 case TRAP_IF:
1684 (*fun) (&TRAP_CONDITION (body), data);
1685 return;
1686
21b8482a
JJ
1687 case PREFETCH:
1688 (*fun) (&XEXP (body, 0), data);
1689 return;
1690
e2373f95
RK
1691 case UNSPEC:
1692 case UNSPEC_VOLATILE:
1693 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1694 (*fun) (&XVECEXP (body, 0, i), data);
1695 return;
1696
1697 case CLOBBER:
3c0cb5de 1698 if (MEM_P (XEXP (body, 0)))
e2373f95
RK
1699 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1700 return;
1701
1702 case SET:
1703 {
1704 rtx dest = SET_DEST (body);
1705
1706 /* For sets we call FUN on everything in the source, plus registers in the
1707 memory expression of the store and the operands of a ZERO_EXTRACT. */
1708 (*fun) (&SET_SRC (body), data);
1709
1710 if (GET_CODE (dest) == ZERO_EXTRACT)
1711 {
1712 (*fun) (&XEXP (dest, 1), data);
1713 (*fun) (&XEXP (dest, 2), data);
1714 }
1715
1716 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1717 dest = XEXP (dest, 0);
1718
3c0cb5de 1719 if (MEM_P (dest))
e2373f95
RK
1720 (*fun) (&XEXP (dest, 0), data);
1721 }
1722 return;
1723
1724 default:
1725 /* All the other possibilities never store. */
1726 (*fun) (pbody, data);
1727 return;
1728 }
1729}
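
/* Usage sketch (hypothetical callback): note_uses hands FUN a pointer
   to each used expression, so a caller may inspect or even replace the
   expression in place.  Here we merely count MEM uses.  */

static void
count_mem_uses_1 (rtx *loc, void *data)
{
  if (MEM_P (*loc))
    ++*(int *) data;
}

/* Caller sketch: note_uses (&PATTERN (insn), count_mem_uses_1, &n);  */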
1730\f
2c88418c
RS
1731/* Return nonzero if X's old contents don't survive after INSN.
1732 This will be true if X is (cc0) or if X is a register and
1733 X dies in INSN or because INSN entirely sets X.
1734
1735 "Entirely set" means set directly and not through a SUBREG,
1736 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1737 Likewise, REG_INC does not count.
1738
1739 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1740 but for this use that makes no difference, since regs don't overlap
1741 during their lifetimes. Therefore, this function may be used
1742 at any time after deaths have been computed (in flow.c).
1743
1744 If REG is a hard reg that occupies multiple machine registers, this
1745 function will only return 1 if each of those registers will be replaced
1746 by INSN. */
1747
1748int
0c20a65f 1749dead_or_set_p (rtx insn, rtx x)
2c88418c 1750{
770ae6cc
RK
1751 unsigned int regno, last_regno;
1752 unsigned int i;
2c88418c
RS
1753
1754 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1755 if (GET_CODE (x) == CC0)
1756 return 1;
1757
f8cfc6aa 1758 if (!REG_P (x))
2c88418c
RS
1759 abort ();
1760
1761 regno = REGNO (x);
1762 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
66fd46b6 1763 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
2c88418c
RS
1764
1765 for (i = regno; i <= last_regno; i++)
1766 if (! dead_or_set_regno_p (insn, i))
1767 return 0;
1768
1769 return 1;
1770}
1771
1772/* Utility function for dead_or_set_p to check an individual register. Also
1773 called from flow.c. */
1774
1775int
0c20a65f 1776dead_or_set_regno_p (rtx insn, unsigned int test_regno)
2c88418c 1777{
770ae6cc 1778 unsigned int regno, endregno;
8c8de5fc 1779 rtx pattern;
2c88418c 1780
0a2287bf
RH
1781 /* See if there is a death note for something that includes TEST_REGNO. */
1782 if (find_regno_note (insn, REG_DEAD, test_regno))
1783 return 1;
2c88418c 1784
4b4bf941 1785 if (CALL_P (insn)
8f3e7a26
RK
1786 && find_regno_fusage (insn, CLOBBER, test_regno))
1787 return 1;
1788
0c99ec5c
RH
1789 pattern = PATTERN (insn);
1790
1791 if (GET_CODE (pattern) == COND_EXEC)
1792 pattern = COND_EXEC_CODE (pattern);
1793
1794 if (GET_CODE (pattern) == SET)
2c88418c 1795 {
92d9256d 1796 rtx dest = SET_DEST (pattern);
a6a2274a 1797
2c88418c
RS
1798 /* A value is totally replaced if it is the destination or the
1799 destination is a SUBREG of REGNO that does not change the number of
1800 words in it. */
6764d250 1801 if (GET_CODE (dest) == SUBREG
2c88418c
RS
1802 && (((GET_MODE_SIZE (GET_MODE (dest))
1803 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1804 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1805 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1806 dest = SUBREG_REG (dest);
1807
f8cfc6aa 1808 if (!REG_P (dest))
2c88418c
RS
1809 return 0;
1810
1811 regno = REGNO (dest);
1812 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
66fd46b6 1813 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
2c88418c
RS
1814
1815 return (test_regno >= regno && test_regno < endregno);
1816 }
0c99ec5c 1817 else if (GET_CODE (pattern) == PARALLEL)
2c88418c 1818 {
b3694847 1819 int i;
2c88418c 1820
0c99ec5c 1821 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
2c88418c 1822 {
0c99ec5c
RH
1823 rtx body = XVECEXP (pattern, 0, i);
1824
1825 if (GET_CODE (body) == COND_EXEC)
1826 body = COND_EXEC_CODE (body);
2c88418c
RS
1827
1828 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1829 {
1830 rtx dest = SET_DEST (body);
1831
1832 if (GET_CODE (dest) == SUBREG
1833 && (((GET_MODE_SIZE (GET_MODE (dest))
1834 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1835 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1836 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1837 dest = SUBREG_REG (dest);
1838
f8cfc6aa 1839 if (!REG_P (dest))
2c88418c
RS
1840 continue;
1841
1842 regno = REGNO (dest);
1843 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
66fd46b6 1844 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
2c88418c
RS
1845
1846 if (test_regno >= regno && test_regno < endregno)
1847 return 1;
1848 }
1849 }
1850 }
1851
1852 return 0;
1853}
1854
1855/* Return the reg-note of kind KIND in insn INSN, if there is one.
1856 If DATUM is nonzero, look for one whose datum is DATUM. */
1857
1858rtx
0c20a65f 1859find_reg_note (rtx insn, enum reg_note kind, rtx datum)
2c88418c 1860{
b3694847 1861 rtx link;
2c88418c 1862
ae78d276 1863 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1864 if (! INSN_P (insn))
ae78d276 1865 return 0;
cd798543
AP
1866 if (datum == 0)
1867 {
1868 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1869 if (REG_NOTE_KIND (link) == kind)
1870 return link;
1871 return 0;
1872 }
ae78d276 1873
2c88418c 1874 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cd798543 1875 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2c88418c
RS
1876 return link;
1877 return 0;
1878}
1879
1880/* Return the reg-note of kind KIND in insn INSN which applies to register
99309f3b
RK
1881 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1882 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1883 it might be the case that the note overlaps REGNO. */
2c88418c
RS
1884
1885rtx
0c20a65f 1886find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
2c88418c 1887{
b3694847 1888 rtx link;
2c88418c 1889
ae78d276 1890 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1891 if (! INSN_P (insn))
ae78d276
MM
1892 return 0;
1893
2c88418c
RS
1894 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1895 if (REG_NOTE_KIND (link) == kind
1896 /* Verify that it is a register, so that scratch and MEM won't cause a
1897 problem here. */
f8cfc6aa 1898 && REG_P (XEXP (link, 0))
99309f3b
RK
1899 && REGNO (XEXP (link, 0)) <= regno
1900 && ((REGNO (XEXP (link, 0))
1901 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6
JH
1902 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1903 [GET_MODE (XEXP (link, 0))]))
99309f3b 1904 > regno))
2c88418c
RS
1905 return link;
1906 return 0;
1907}
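
/* Usage sketch (hypothetical helper): the typical way to fetch a known
   value attached to an insn; a zero DATUM makes find_reg_note match any
   note of the requested kind.  */

static rtx
insn_known_value (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}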
8f3e7a26 1908
d9c695ff
RK
1909/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1910 has such a note. */
1911
1912rtx
0c20a65f 1913find_reg_equal_equiv_note (rtx insn)
d9c695ff 1914{
cd648cec 1915 rtx link;
d9c695ff 1916
cd648cec 1917 if (!INSN_P (insn))
d9c695ff 1918 return 0;
cd648cec
JH
1919 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1920 if (REG_NOTE_KIND (link) == REG_EQUAL
1921 || REG_NOTE_KIND (link) == REG_EQUIV)
1922 {
1923 if (single_set (insn) == 0)
1924 return 0;
1925 return link;
1926 }
1927 return NULL;
1928}
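
/* Usage sketch (an assumed consumer, not code from this file): when an
   insn has a single set, an optimizer may prefer the note's value over
   SET_SRC.  EQUIV_VALUE is a hypothetical helper name.  */

static rtx
equiv_value (rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);

  /* XEXP (note, 0) is the value the destination of the single set is
     known to be equal to.  */
  return note ? XEXP (note, 0) : NULL_RTX;
}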
1929
1930/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1931 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1932
1933int
0c20a65f 1934find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1935{
1936 /* If it's not a CALL_INSN, it can't possibly have a
1937 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
4b4bf941 1938 if (!CALL_P (insn))
1939 return 0;
1940
1941 if (! datum)
8e2e89f7 1942 abort ();
8f3e7a26 1943
f8cfc6aa 1944 if (!REG_P (datum))
8f3e7a26 1945 {
b3694847 1946 rtx link;
1947
1948 for (link = CALL_INSN_FUNCTION_USAGE (insn);
a6a2274a 1949 link;
8f3e7a26 1950 link = XEXP (link, 1))
a6a2274a 1951 if (GET_CODE (XEXP (link, 0)) == code
cc863bea 1952 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
a6a2274a 1953 return 1;
1954 }
1955 else
1956 {
770ae6cc 1957 unsigned int regno = REGNO (datum);
1958
1959 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1960 to pseudo registers, so don't bother checking. */
1961
1962 if (regno < FIRST_PSEUDO_REGISTER)
a6a2274a 1963 {
770ae6cc 1964 unsigned int end_regno
66fd46b6 1965 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
770ae6cc 1966 unsigned int i;
1967
1968 for (i = regno; i < end_regno; i++)
1969 if (find_regno_fusage (insn, code, i))
1970 return 1;
a6a2274a 1971 }
1972 }
1973
1974 return 0;
1975}
1976
1977/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1978 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1979
1980int
0c20a65f 1981find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
8f3e7a26 1982{
b3694847 1983 rtx link;
1984
1985 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1986 to pseudo registers, so don't bother checking. */
1987
1988 if (regno >= FIRST_PSEUDO_REGISTER
4b4bf941 1989 || !CALL_P (insn) )
1990 return 0;
1991
1992 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
83ab3839 1993 {
1994 unsigned int regnote;
1995 rtx op, reg;
1996
1997 if (GET_CODE (op = XEXP (link, 0)) == code
f8cfc6aa 1998 && REG_P (reg = XEXP (op, 0))
83ab3839 1999 && (regnote = REGNO (reg)) <= regno
66fd46b6 2000 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
2001 return 1;
2002 }
2003
2004 return 0;
2005}
2006
2007/* Return true if INSN is a call to a pure function. */
2008
2009int
0c20a65f 2010pure_call_p (rtx insn)
2011{
2012 rtx link;
2013
4b4bf941 2014 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
2015 return 0;
2016
2017 /* Look for the note that differentiates const and pure functions. */
2018 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2019 {
2020 rtx u, m;
2021
2022 if (GET_CODE (u = XEXP (link, 0)) == USE
3c0cb5de 2023 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
2024 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2025 return 1;
2026 }
2027
2028 return 0;
2029}
2030\f
2031/* Remove register note NOTE from the REG_NOTES of INSN. */
2032
2033void
0c20a65f 2034remove_note (rtx insn, rtx note)
2c88418c 2035{
b3694847 2036 rtx link;
2c88418c 2037
2038 if (note == NULL_RTX)
2039 return;
2040
2041 if (REG_NOTES (insn) == note)
2042 {
2043 REG_NOTES (insn) = XEXP (note, 1);
2044 return;
2045 }
2046
2047 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2048 if (XEXP (link, 1) == note)
2049 {
2050 XEXP (link, 1) = XEXP (note, 1);
2051 return;
2052 }
2053
2054 abort ();
2055}
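
/* A sketch of the usual idiom for stripping every note of one kind;
   hoist_insn_after near the end of this file uses the same loop for
   REG_UNUSED, REG_EQUAL, REG_EQUIV and REG_DEAD.  The helper name is
   hypothetical.  */

static void
strip_reg_unused_notes (rtx insn)
{
  rtx note;

  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)) != 0)
    remove_note (insn, note);
}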
55a98783 2056
2057/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2058 return 1 if it is found. A simple equality test is used to determine if
2059 NODE matches. */
2060
2061int
0c20a65f 2062in_expr_list_p (rtx listp, rtx node)
2063{
2064 rtx x;
2065
2066 for (x = listp; x; x = XEXP (x, 1))
2067 if (node == XEXP (x, 0))
2068 return 1;
2069
2070 return 0;
2071}
2072
2073/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2074 remove that entry from the list if it is found.
55a98783 2075
dd248abd 2076 A simple equality test is used to determine if NODE matches. */
2077
2078void
0c20a65f 2079remove_node_from_expr_list (rtx node, rtx *listp)
2080{
2081 rtx temp = *listp;
2082 rtx prev = NULL_RTX;
2083
2084 while (temp)
2085 {
2086 if (node == XEXP (temp, 0))
2087 {
2088 /* Splice the node out of the list. */
2089 if (prev)
2090 XEXP (prev, 1) = XEXP (temp, 1);
2091 else
2092 *listp = XEXP (temp, 1);
2093
2094 return;
2095 }
2096
2097 prev = temp;
2098 temp = XEXP (temp, 1);
2099 }
2100}
2c88418c 2101\f
2102/* Nonzero if X contains any volatile instructions. These are instructions
2103 which may cause unpredictable machine state, and thus no
2104 instructions should be moved or combined across them. This includes
2105 only volatile asms and UNSPEC_VOLATILE instructions. */
2106
2107int
0c20a65f 2108volatile_insn_p (rtx x)
2b067faf 2109{
b3694847 2110 RTX_CODE code;
2111
2112 code = GET_CODE (x);
2113 switch (code)
2114 {
2115 case LABEL_REF:
2116 case SYMBOL_REF:
2117 case CONST_INT:
2118 case CONST:
2119 case CONST_DOUBLE:
69ef87e2 2120 case CONST_VECTOR:
2121 case CC0:
2122 case PC:
2123 case REG:
2124 case SCRATCH:
2125 case CLOBBER:
2126 case ADDR_VEC:
2127 case ADDR_DIFF_VEC:
2128 case CALL:
2129 case MEM:
2130 return 0;
2131
2132 case UNSPEC_VOLATILE:
2133 /* case TRAP_IF: This isn't clear yet. */
2134 return 1;
2135
4c46ea23 2136 case ASM_INPUT:
2137 case ASM_OPERANDS:
2138 if (MEM_VOLATILE_P (x))
2139 return 1;
2140
2141 default:
2142 break;
2143 }
2144
2145 /* Recursively scan the operands of this expression. */
2146
2147 {
2148 const char *fmt = GET_RTX_FORMAT (code);
2149 int i;
a6a2274a 2150
2151 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2152 {
2153 if (fmt[i] == 'e')
2154 {
31001f72 2155 if (volatile_insn_p (XEXP (x, i)))
2156 return 1;
2157 }
d4757e6a 2158 else if (fmt[i] == 'E')
2b067faf 2159 {
b3694847 2160 int j;
2b067faf 2161 for (j = 0; j < XVECLEN (x, i); j++)
31001f72 2162 if (volatile_insn_p (XVECEXP (x, i, j)))
2163 return 1;
2164 }
2165 }
2166 }
2167 return 0;
2168}
2169
2c88418c 2170/* Nonzero if X contains any volatile memory references
2ac4fed0 2171 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2172
2173int
0c20a65f 2174volatile_refs_p (rtx x)
2c88418c 2175{
b3694847 2176 RTX_CODE code;
2177
2178 code = GET_CODE (x);
2179 switch (code)
2180 {
2181 case LABEL_REF:
2182 case SYMBOL_REF:
2183 case CONST_INT:
2184 case CONST:
2185 case CONST_DOUBLE:
69ef87e2 2186 case CONST_VECTOR:
2187 case CC0:
2188 case PC:
2189 case REG:
2190 case SCRATCH:
2191 case CLOBBER:
2192 case ADDR_VEC:
2193 case ADDR_DIFF_VEC:
2194 return 0;
2195
2ac4fed0 2196 case UNSPEC_VOLATILE:
2197 return 1;
2198
2199 case MEM:
4c46ea23 2200 case ASM_INPUT:
2201 case ASM_OPERANDS:
2202 if (MEM_VOLATILE_P (x))
2203 return 1;
2204
2205 default:
2206 break;
2207 }
2208
2209 /* Recursively scan the operands of this expression. */
2210
2211 {
2212 const char *fmt = GET_RTX_FORMAT (code);
2213 int i;
a6a2274a 2214
2215 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2216 {
2217 if (fmt[i] == 'e')
2218 {
2219 if (volatile_refs_p (XEXP (x, i)))
2220 return 1;
2221 }
d4757e6a 2222 else if (fmt[i] == 'E')
2c88418c 2223 {
b3694847 2224 int j;
2225 for (j = 0; j < XVECLEN (x, i); j++)
2226 if (volatile_refs_p (XVECEXP (x, i, j)))
2227 return 1;
2228 }
2229 }
2230 }
2231 return 0;
2232}
2233
2234/* Similar to above, except that it also rejects register pre- and post-
2235 incrementing. */
2236
2237int
0c20a65f 2238side_effects_p (rtx x)
2c88418c 2239{
b3694847 2240 RTX_CODE code;
2241
2242 code = GET_CODE (x);
2243 switch (code)
2244 {
2245 case LABEL_REF:
2246 case SYMBOL_REF:
2247 case CONST_INT:
2248 case CONST:
2249 case CONST_DOUBLE:
69ef87e2 2250 case CONST_VECTOR:
2251 case CC0:
2252 case PC:
2253 case REG:
2254 case SCRATCH:
2255 case ADDR_VEC:
2256 case ADDR_DIFF_VEC:
2257 return 0;
2258
2259 case CLOBBER:
2260 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2261 when some combination can't be done. If we see one, don't think
2262 that we can simplify the expression. */
2263 return (GET_MODE (x) != VOIDmode);
2264
2265 case PRE_INC:
2266 case PRE_DEC:
2267 case POST_INC:
2268 case POST_DEC:
2269 case PRE_MODIFY:
2270 case POST_MODIFY:
2c88418c 2271 case CALL:
2ac4fed0 2272 case UNSPEC_VOLATILE:
2273 /* case TRAP_IF: This isn't clear yet. */
2274 return 1;
2275
2276 case MEM:
4c46ea23 2277 case ASM_INPUT:
2278 case ASM_OPERANDS:
2279 if (MEM_VOLATILE_P (x))
2280 return 1;
2281
2282 default:
2283 break;
2284 }
2285
2286 /* Recursively scan the operands of this expression. */
2287
2288 {
2289 const char *fmt = GET_RTX_FORMAT (code);
2290 int i;
a6a2274a 2291
2292 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2293 {
2294 if (fmt[i] == 'e')
2295 {
2296 if (side_effects_p (XEXP (x, i)))
2297 return 1;
2298 }
d4757e6a 2299 else if (fmt[i] == 'E')
2c88418c 2300 {
b3694847 2301 int j;
2302 for (j = 0; j < XVECLEN (x, i); j++)
2303 if (side_effects_p (XVECEXP (x, i, j)))
2304 return 1;
2305 }
2306 }
2307 }
2308 return 0;
2309}
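
/* A hedged example, not part of this file: the three predicates above
   form a rough hierarchy.  A register pre-increment, for instance, is a
   side effect but neither a volatile insn nor a volatile reference, so
   it may be kept but not deleted or duplicated.  The register number
   used here is purely illustrative.  */

static int
pre_inc_is_side_effect_only (void)
{
  rtx reg = gen_rtx_REG (SImode, 1);
  rtx inc = gen_rtx_PRE_INC (SImode, reg);

  /* volatile_insn_p (inc) == 0, volatile_refs_p (inc) == 0,
     side_effects_p (inc) == 1.  */
  return side_effects_p (inc) && ! volatile_refs_p (inc);
}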
2310\f
2311/* Return nonzero if evaluating rtx X might cause a trap. */
2312
2313int
0c20a65f 2314may_trap_p (rtx x)
2315{
2316 int i;
2317 enum rtx_code code;
6f7d635c 2318 const char *fmt;
2319
2320 if (x == 0)
2321 return 0;
2322 code = GET_CODE (x);
2323 switch (code)
2324 {
2325 /* Handle these cases quickly. */
2326 case CONST_INT:
2327 case CONST_DOUBLE:
69ef87e2 2328 case CONST_VECTOR:
2329 case SYMBOL_REF:
2330 case LABEL_REF:
2331 case CONST:
2332 case PC:
2333 case CC0:
2334 case REG:
2335 case SCRATCH:
2336 return 0;
2337
22aa60a1 2338 case ASM_INPUT:
2ac4fed0 2339 case UNSPEC_VOLATILE:
2340 case TRAP_IF:
2341 return 1;
2342
2343 case ASM_OPERANDS:
2344 return MEM_VOLATILE_P (x);
2345
2346 /* Memory ref can trap unless it's a static var or a stack slot. */
2347 case MEM:
2348 if (MEM_NOTRAP_P (x))
2349 return 0;
2350 return rtx_addr_can_trap_p (XEXP (x, 0));
2351
2352 /* Division by a non-constant might trap. */
2353 case DIV:
2354 case MOD:
2355 case UDIV:
2356 case UMOD:
2357 if (HONOR_SNANS (GET_MODE (x)))
2358 return 1;
e9a25f70 2359 if (! CONSTANT_P (XEXP (x, 1))
2360 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2361 && flag_trapping_math))
2c88418c 2362 return 1;
30c3267c 2363 if (XEXP (x, 1) == const0_rtx)
2c88418c 2364 return 1;
2365 break;
2366
2367 case EXPR_LIST:
2368 /* An EXPR_LIST is used to represent a function call. This
2369 certainly may trap. */
2370 return 1;
e9a25f70 2371
2372 case GE:
2373 case GT:
2374 case LE:
2375 case LT:
19aec195 2376 case LTGT:
55143861 2377 case COMPARE:
734508ea 2378 /* Some floating point comparisons may trap. */
2379 if (!flag_trapping_math)
2380 break;
2381 /* ??? There is no machine independent way to check for tests that trap
2382 when COMPARE is used, though many targets do make this distinction.
2383 For instance, sparc uses CCFPE for compares which generate exceptions
2384 and CCFP for compares which do not generate exceptions. */
52bfebf0 2385 if (HONOR_NANS (GET_MODE (x)))
2386 return 1;
2387 /* But often the compare has some CC mode, so check operand
2388 modes as well. */
2389 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2390 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2391 return 1;
2392 break;
2393
2394 case EQ:
2395 case NE:
2396 if (HONOR_SNANS (GET_MODE (x)))
2397 return 1;
2398 /* Often comparison is CC mode, so check operand modes. */
2399 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2400 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2401 return 1;
2402 break;
2403
2404 case FIX:
2405 /* Conversion of floating point might trap. */
2406 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2407 return 1;
2408 break;
2409
2410 case NEG:
2411 case ABS:
2412 /* These operations don't trap even with floating point. */
2413 break;
2414
2415 default:
2416 /* Any floating arithmetic may trap. */
2417 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2418 && flag_trapping_math)
2419 return 1;
2420 }
2421
2422 fmt = GET_RTX_FORMAT (code);
2423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2424 {
2425 if (fmt[i] == 'e')
2426 {
2427 if (may_trap_p (XEXP (x, i)))
2428 return 1;
2429 }
2430 else if (fmt[i] == 'E')
2431 {
b3694847 2432 int j;
2433 for (j = 0; j < XVECLEN (x, i); j++)
2434 if (may_trap_p (XVECEXP (x, i, j)))
2435 return 1;
2436 }
2437 }
2438 return 0;
2439}
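
/* A sketch (not in rtlanal.c) of the distinction the DIV case above
   draws: division by a register may trap, while division by a nonzero
   constant in an integer mode may not.  The register number is purely
   illustrative.  */

static int
division_trap_example (void)
{
  rtx reg = gen_rtx_REG (SImode, 1);

  /* Non-constant divisor: may_trap_p returns 1.  */
  int by_reg = may_trap_p (gen_rtx_DIV (SImode, reg, reg));
  /* Nonzero constant divisor in an integer mode: returns 0.  */
  int by_const = may_trap_p (gen_rtx_DIV (SImode, reg, GEN_INT (16)));

  return by_reg && ! by_const;
}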
2440\f
2441/* Return nonzero if X contains a comparison that is not either EQ or NE,
2442 i.e., an inequality. */
2443
2444int
0c20a65f 2445inequality_comparisons_p (rtx x)
2c88418c 2446{
2447 const char *fmt;
2448 int len, i;
2449 enum rtx_code code = GET_CODE (x);
2450
2451 switch (code)
2452 {
2453 case REG:
2454 case SCRATCH:
2455 case PC:
2456 case CC0:
2457 case CONST_INT:
2458 case CONST_DOUBLE:
69ef87e2 2459 case CONST_VECTOR:
2460 case CONST:
2461 case LABEL_REF:
2462 case SYMBOL_REF:
2463 return 0;
2464
2465 case LT:
2466 case LTU:
2467 case GT:
2468 case GTU:
2469 case LE:
2470 case LEU:
2471 case GE:
2472 case GEU:
2473 return 1;
a6a2274a 2474
2475 default:
2476 break;
2477 }
2478
2479 len = GET_RTX_LENGTH (code);
2480 fmt = GET_RTX_FORMAT (code);
2481
2482 for (i = 0; i < len; i++)
2483 {
2484 if (fmt[i] == 'e')
2485 {
2486 if (inequality_comparisons_p (XEXP (x, i)))
2487 return 1;
2488 }
2489 else if (fmt[i] == 'E')
2490 {
b3694847 2491 int j;
2492 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2493 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2494 return 1;
2495 }
2496 }
a6a2274a 2497
2498 return 0;
2499}
2500\f
2501/* Replace any occurrence of FROM in X with TO. The function does
2502 not enter into CONST_DOUBLE for the replace.
2503
2504 Note that copying is not done so X must not be shared unless all copies
2505 are to be modified. */
2506
2507rtx
0c20a65f 2508replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2509{
2510 int i, j;
2511 const char *fmt;
2c88418c 2512
1ed0205e 2513 /* The following prevents infinite loops when we change a MEM in a
dc297297 2514 CONST_DOUBLE into the same CONST_DOUBLE. */
2515 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2516 return x;
2517
2518 if (x == from)
2519 return to;
2520
2521 /* Allow this function to make replacements in EXPR_LISTs. */
2522 if (x == 0)
2523 return 0;
2524
2525 if (GET_CODE (x) == SUBREG)
2526 {
2527 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2528
2529 if (GET_CODE (new) == CONST_INT)
2530 {
2531 x = simplify_subreg (GET_MODE (x), new,
2532 GET_MODE (SUBREG_REG (x)),
2533 SUBREG_BYTE (x));
2534 if (! x)
2535 abort ();
2536 }
2537 else
2538 SUBREG_REG (x) = new;
2539
2540 return x;
2541 }
2542 else if (GET_CODE (x) == ZERO_EXTEND)
2543 {
2544 rtx new = replace_rtx (XEXP (x, 0), from, to);
2545
2546 if (GET_CODE (new) == CONST_INT)
2547 {
2548 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2549 new, GET_MODE (XEXP (x, 0)));
2550 if (! x)
2551 abort ();
2552 }
2553 else
2554 XEXP (x, 0) = new;
2555
2556 return x;
2557 }
2558
2559 fmt = GET_RTX_FORMAT (GET_CODE (x));
2560 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2561 {
2562 if (fmt[i] == 'e')
2563 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2564 else if (fmt[i] == 'E')
2565 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2566 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2567 }
2568
2569 return x;
a6a2274a 2570}
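
/* Usage sketch, not from this file: replace_rtx modifies X in place
   (except when X itself is FROM), so a caller that must preserve the
   original expression typically copies it first.  The helper name is
   hypothetical.  */

static rtx
replace_reg_in_copy (rtx x, rtx old_reg, rtx new_reg)
{
  return replace_rtx (copy_rtx (x), old_reg, new_reg);
}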
2571\f
2572/* Throughout the rtx X, replace many registers according to REG_MAP.
2573 Return the replacement for X (which may be X with altered contents).
2574 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
a6a2274a 2575 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2576
2577 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2578 should not be mapped to pseudos or vice versa since validate_change
2579 is not called.
2580
2581 If REPLACE_DEST is 1, replacements are also done in destinations;
2582 otherwise, only sources are replaced. */
2583
2584rtx
0c20a65f 2585replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2c88418c 2586{
2587 enum rtx_code code;
2588 int i;
2589 const char *fmt;
2590
2591 if (x == 0)
2592 return x;
2593
2594 code = GET_CODE (x);
2595 switch (code)
2596 {
2597 case SCRATCH:
2598 case PC:
2599 case CC0:
2600 case CONST_INT:
2601 case CONST_DOUBLE:
69ef87e2 2602 case CONST_VECTOR:
2603 case CONST:
2604 case SYMBOL_REF:
2605 case LABEL_REF:
2606 return x;
2607
2608 case REG:
2609 /* Verify that the register has an entry before trying to access it. */
2610 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2611 {
2612 /* SUBREGs can't be shared. Always return a copy to ensure that if
2613 this replacement occurs more than once then each instance will
2614 get distinct rtx. */
2615 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2616 return copy_rtx (reg_map[REGNO (x)]);
2617 return reg_map[REGNO (x)];
2618 }
2619 return x;
2620
2621 case SUBREG:
2622 /* Prevent making nested SUBREGs. */
f8cfc6aa 2623 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2624 && reg_map[REGNO (SUBREG_REG (x))] != 0
2625 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2626 {
2627 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
e0e08ac2 2628 return simplify_gen_subreg (GET_MODE (x), map_val,
a6a2274a 2629 GET_MODE (SUBREG_REG (x)),
e0e08ac2 2630 SUBREG_BYTE (x));
2631 }
2632 break;
2633
2634 case SET:
2635 if (replace_dest)
2636 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2637
3c0cb5de 2638 else if (MEM_P (SET_DEST (x))
2639 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2640 /* Even if we are not to replace destinations, replace register if it
2641 is CONTAINED in destination (destination is memory or
2642 STRICT_LOW_PART). */
2643 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2644 reg_map, nregs, 0);
2645 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2646 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2647 break;
2648
2649 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2650 return x;
a6a2274a 2651
2652 default:
2653 break;
2654 }
2655
2656 fmt = GET_RTX_FORMAT (code);
2657 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2658 {
2659 if (fmt[i] == 'e')
2660 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
d4757e6a 2661 else if (fmt[i] == 'E')
2c88418c 2662 {
b3694847 2663 int j;
2664 for (j = 0; j < XVECLEN (x, i); j++)
2665 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2666 nregs, replace_dest);
2667 }
2668 }
2669 return x;
2670}
2a1777af 2671
39811184 2672/* Replace occurrences of the old label in *X with the new one.
4af16369 2673 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2674
2675int
0c20a65f 2676replace_label (rtx *x, void *data)
2677{
2678 rtx l = *x;
2679 rtx old_label = ((replace_label_data *) data)->r1;
2680 rtx new_label = ((replace_label_data *) data)->r2;
2681 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2682
2683 if (l == NULL_RTX)
2684 return 0;
2685
2686 if (GET_CODE (l) == SYMBOL_REF
2687 && CONSTANT_POOL_ADDRESS_P (l))
4af16369 2688 {
173cd571 2689 rtx c = get_pool_constant (l);
2690 if (rtx_referenced_p (old_label, c))
2691 {
2692 rtx new_c, new_l;
2693 replace_label_data *d = (replace_label_data *) data;
0c20a65f 2694
2695 /* Create a copy of constant C; replace the label inside
2696 but do not update LABEL_NUSES because uses in constant pool
2697 are not counted. */
2698 new_c = copy_rtx (c);
2699 d->update_label_nuses = false;
2700 for_each_rtx (&new_c, replace_label, data);
2701 d->update_label_nuses = update_label_nuses;
2702
2703 /* Add the new constant NEW_C to constant pool and replace
2704 the old reference to constant by new reference. */
173cd571 2705 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2706 *x = replace_rtx (l, l, new_l);
2707 }
2708 return 0;
2709 }
2710
2711 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2712 field. This is not handled by for_each_rtx because it doesn't
2713 handle unprinted ('0') fields. */
4b4bf941 2714 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
39811184 2715 JUMP_LABEL (l) = new_label;
39811184 2716
2717 if ((GET_CODE (l) == LABEL_REF
2718 || GET_CODE (l) == INSN_LIST)
2719 && XEXP (l, 0) == old_label)
2720 {
2721 XEXP (l, 0) = new_label;
2722 if (update_label_nuses)
2723 {
2724 ++LABEL_NUSES (new_label);
2725 --LABEL_NUSES (old_label);
2726 }
2727 return 0;
2728 }
2729
2730 return 0;
2731}
2732
2733/* When *BODY is equal to X or X is directly referenced by *BODY
2734 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2735 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2736
2737static int
0c20a65f 2738rtx_referenced_p_1 (rtx *body, void *x)
39811184 2739{
2740 rtx y = (rtx) x;
2741
2742 if (*body == NULL_RTX)
2743 return y == NULL_RTX;
2744
2745 /* Return true if a label_ref *BODY refers to label Y. */
4b4bf941 2746 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2747 return XEXP (*body, 0) == y;
2748
2749 /* If *BODY is a reference to pool constant traverse the constant. */
2750 if (GET_CODE (*body) == SYMBOL_REF
2751 && CONSTANT_POOL_ADDRESS_P (*body))
2752 return rtx_referenced_p (y, get_pool_constant (*body));
2753
2754 /* By default, compare the RTL expressions. */
2755 return rtx_equal_p (*body, y);
2756}
2757
4af16369 2758/* Return true if X is referenced in BODY. */
2759
2760int
0c20a65f 2761rtx_referenced_p (rtx x, rtx body)
39811184 2762{
4af16369 2763 return for_each_rtx (&body, rtx_referenced_p_1, x);
2764}
2765
2766/* If INSN is a tablejump, return true and store the label (before jump table) to
2767 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2768
2769bool
ee735eef 2770tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
39811184 2771{
2772 rtx label, table;
2773
4b4bf941 2774 if (JUMP_P (insn)
2775 && (label = JUMP_LABEL (insn)) != NULL_RTX
2776 && (table = next_active_insn (label)) != NULL_RTX
4b4bf941 2777 && JUMP_P (table)
2778 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2779 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
39811184 2780 {
2781 if (labelp)
2782 *labelp = label;
2783 if (tablep)
2784 *tablep = table;
2785 return true;
2786 }
2787 return false;
2788}
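
/* Usage sketch (assumed consumer, not code from this file): fetch the
   dispatch table body of a tablejump.  PATTERN (table) is an ADDR_VEC or
   ADDR_DIFF_VEC, which label_is_jump_target_p near the end of this file
   walks in the same way.  */

static rtx
jump_table_pattern (rtx insn)
{
  rtx table;

  if (tablejump_p (insn, NULL, &table))
    return PATTERN (table);
  return NULL_RTX;
}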
2789
2790/* A subroutine of computed_jump_p; return 1 if X contains a REG or MEM or
2791 constant that is not in the constant pool and not in the condition
2792 of an IF_THEN_ELSE. */
2793
2794static int
0c20a65f 2795computed_jump_p_1 (rtx x)
2796{
2797 enum rtx_code code = GET_CODE (x);
2798 int i, j;
6f7d635c 2799 const char *fmt;
2800
2801 switch (code)
2802 {
2803 case LABEL_REF:
2804 case PC:
2805 return 0;
2806
2807 case CONST:
2808 case CONST_INT:
2809 case CONST_DOUBLE:
69ef87e2 2810 case CONST_VECTOR:
fce7e199 2811 case SYMBOL_REF:
2812 case REG:
2813 return 1;
2814
2815 case MEM:
2816 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2817 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2818
2819 case IF_THEN_ELSE:
2820 return (computed_jump_p_1 (XEXP (x, 1))
2821 || computed_jump_p_1 (XEXP (x, 2)));
2822
2823 default:
2824 break;
2825 }
2826
2827 fmt = GET_RTX_FORMAT (code);
2828 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2829 {
2830 if (fmt[i] == 'e'
fce7e199 2831 && computed_jump_p_1 (XEXP (x, i)))
2832 return 1;
2833
d4757e6a 2834 else if (fmt[i] == 'E')
2a1777af 2835 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 2836 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2837 return 1;
2838 }
2839
2840 return 0;
2841}
2842
2843/* Return nonzero if INSN is an indirect jump (aka computed jump).
2844
2845 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 2846 we can recognize them by a (use (label_ref)). */
2847
2848int
0c20a65f 2849computed_jump_p (rtx insn)
2850{
2851 int i;
4b4bf941 2852 if (JUMP_P (insn))
2853 {
2854 rtx pat = PATTERN (insn);
2a1777af 2855
2856 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2857 return 0;
2858 else if (GET_CODE (pat) == PARALLEL)
2859 {
2860 int len = XVECLEN (pat, 0);
2861 int has_use_labelref = 0;
2862
2863 for (i = len - 1; i >= 0; i--)
2864 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2865 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2866 == LABEL_REF))
2867 has_use_labelref = 1;
2868
2869 if (! has_use_labelref)
2870 for (i = len - 1; i >= 0; i--)
2871 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2872 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 2873 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2874 return 1;
2875 }
2876 else if (GET_CODE (pat) == SET
2877 && SET_DEST (pat) == pc_rtx
fce7e199 2878 && computed_jump_p_1 (SET_SRC (pat)))
2879 return 1;
2880 }
2881 return 0;
2882}
2883
2884/* Traverse X via depth-first search, calling F for each
2885 sub-expression (including X itself). F is also passed the DATA.
2886 If F returns -1, do not traverse sub-expressions, but continue
2887 traversing the rest of the tree. If F ever returns any other
40f03658 2888 nonzero value, stop the traversal, and return the value returned
2889 by F. Otherwise, return 0. This function does not traverse inside
2890 tree structure that contains RTX_EXPRs, or into sub-expressions
2891 whose format code is `0' since it is not known whether or not those
2892 codes are actually RTL.
2893
2894 This routine is very general, and could (should?) be used to
2895 implement many of the other routines in this file. */
2896
ae0b51ef 2897int
0c20a65f 2898for_each_rtx (rtx *x, rtx_function f, void *data)
2899{
2900 int result;
2901 int length;
b987f237 2902 const char *format;
2903 int i;
2904
2905 /* Call F on X. */
b987f237 2906 result = (*f) (x, data);
2907 if (result == -1)
2908 /* Do not traverse sub-expressions. */
2909 return 0;
2910 else if (result != 0)
2911 /* Stop the traversal. */
2912 return result;
2913
2914 if (*x == NULL_RTX)
2915 /* There are no sub-expressions. */
2916 return 0;
2917
2918 length = GET_RTX_LENGTH (GET_CODE (*x));
2919 format = GET_RTX_FORMAT (GET_CODE (*x));
2920
a6a2274a 2921 for (i = 0; i < length; ++i)
ccc2d6d0 2922 {
a6a2274a 2923 switch (format[i])
2924 {
2925 case 'e':
2926 result = for_each_rtx (&XEXP (*x, i), f, data);
2927 if (result != 0)
2928 return result;
2929 break;
2930
2931 case 'V':
2932 case 'E':
a6a2274a 2933 if (XVEC (*x, i) != 0)
2934 {
2935 int j;
2936 for (j = 0; j < XVECLEN (*x, i); ++j)
2937 {
2938 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2939 if (result != 0)
2940 return result;
2941 }
2942 }
a6a2274a 2943 break;
2944
2945 default:
2946 /* Nothing to do. */
2947 break;
2948 }
2949
2950 }
2951
2952 return 0;
2953}
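
/* Example callback, a sketch rather than anything this file provides:
   count the MEMs (including nested ones) in an expression.  Note that F
   is called for X itself before the null check, so a callback must
   tolerate a null *X.  */

static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  return 0;	/* Continue the traversal.  */
}

static int
count_mems (rtx x)
{
  int n = 0;

  for_each_rtx (&x, count_mems_1, &n);
  return n;
}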
3ec2b590 2954
2955/* Searches X for any reference to REGNO, returning the rtx of the
2956 reference found if any. Otherwise, returns NULL_RTX. */
2957
2958rtx
0c20a65f 2959regno_use_in (unsigned int regno, rtx x)
777b1b71 2960{
b3694847 2961 const char *fmt;
2962 int i, j;
2963 rtx tem;
2964
f8cfc6aa 2965 if (REG_P (x) && REGNO (x) == regno)
2966 return x;
2967
2968 fmt = GET_RTX_FORMAT (GET_CODE (x));
2969 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2970 {
2971 if (fmt[i] == 'e')
2972 {
2973 if ((tem = regno_use_in (regno, XEXP (x, i))))
2974 return tem;
2975 }
2976 else if (fmt[i] == 'E')
2977 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2978 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2979 return tem;
2980 }
2981
2982 return NULL_RTX;
2983}
2dfa9a87 2984
2985/* Return a value indicating whether OP, an operand of a commutative
2986 operation, is preferred as the first or second operand. The higher
2987 the value, the stronger the preference for being the first operand.
2988 We use negative values to indicate a preference for the first operand
2989 and positive values for the second operand. */
2990
9b3bd424 2991int
0c20a65f 2992commutative_operand_precedence (rtx op)
e5c56fd9 2993{
e3d6e740 2994 enum rtx_code code = GET_CODE (op);
e3d6e740 2995
e5c56fd9 2996 /* Constants always become the second operand. Prefer "nice" constants. */
e3d6e740 2997 if (code == CONST_INT)
9ce79a7a 2998 return -7;
e3d6e740 2999 if (code == CONST_DOUBLE)
3000 return -6;
3001 op = avoid_constant_pool_reference (op);
3002
3003 switch (GET_RTX_CLASS (code))
3004 {
3005 case RTX_CONST_OBJ:
3006 if (code == CONST_INT)
3007 return -5;
3008 if (code == CONST_DOUBLE)
3009 return -4;
3010 return -3;
3011
3012 case RTX_EXTRA:
3013 /* SUBREGs of objects should come second. */
3014 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3015 return -2;
3016
3017 if (!CONSTANT_P (op))
3018 return 0;
3019 else
3020 /* As for RTX_CONST_OBJ. */
3021 return -3;
3022
3023 case RTX_OBJ:
3024 /* Complex expressions should be the first, so decrease priority
3025 of objects. */
3026 return -1;
3027
3028 case RTX_COMM_ARITH:
3029 /* Prefer operands that are themselves commutative to be first.
3030 This helps to make things linear. In particular,
3031 (and (and (reg) (reg)) (not (reg))) is canonical. */
3032 return 4;
3033
3034 case RTX_BIN_ARITH:
3035 /* If only one operand is a binary expression, it will be the first
3036 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3037 is canonical, although it will usually be further simplified. */
3038 return 2;
e3d6e740 3039
3040 case RTX_UNARY:
3041 /* Then prefer NEG and NOT. */
3042 if (code == NEG || code == NOT)
3043 return 1;
e5c56fd9 3044
3045 default:
3046 return 0;
3047 }
3048}
3049
f63d1bf7 3050/* Return 1 iff it is necessary to swap operands of commutative operation
3051 in order to canonicalize expression. */
3052
3053int
0c20a65f 3054swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 3055{
3056 return (commutative_operand_precedence (x)
3057 < commutative_operand_precedence (y));
e5c56fd9 3058}
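
/* A sketch of the canonicalization callers typically perform (assumed
   usage, not code from this file): put the operand with the higher
   precedence first.  */

static void
canonicalize_comm_operands (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}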
3059
3060/* Return 1 if X is an autoincrement side effect and the register is
3061 not the stack pointer. */
3062int
0c20a65f 3063auto_inc_p (rtx x)
3064{
3065 switch (GET_CODE (x))
3066 {
3067 case PRE_INC:
3068 case POST_INC:
3069 case PRE_DEC:
3070 case POST_DEC:
3071 case PRE_MODIFY:
3072 case POST_MODIFY:
3073 /* There are no REG_INC notes for SP. */
3074 if (XEXP (x, 0) != stack_pointer_rtx)
3075 return 1;
3076 default:
3077 break;
3078 }
3079 return 0;
3080}
3081
3082/* Return 1 if the sequence of instructions beginning with FROM and up
3083 to and including TO is safe to move. If NEW_TO is non-NULL, and
3084 the sequence is not already safe to move, but can be easily
3085 extended to a sequence which is safe, then NEW_TO will point to the
3086 end of the extended sequence.
3087
7015a814 3088 For now, this function only checks that the region contains whole
5bea1ccf 3089 exception regions, but it could be extended to check additional
7015a814 3090 conditions as well. */
3091
3092int
0c20a65f 3093insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3094{
3095 int eh_region_count = 0;
3096 int past_to_p = 0;
3097 rtx r = from;
3098
3099 /* By default, assume the end of the region will be what was
3100 suggested. */
3101 if (new_to)
3102 *new_to = to;
3103
3104 while (r)
3105 {
4b4bf941 3106 if (NOTE_P (r))
3107 {
3108 switch (NOTE_LINE_NUMBER (r))
3109 {
3110 case NOTE_INSN_EH_REGION_BEG:
3111 ++eh_region_count;
3112 break;
3113
3114 case NOTE_INSN_EH_REGION_END:
3115 if (eh_region_count == 0)
3116 /* This sequence of instructions contains the end of
3117 an exception region, but not the beginning. Moving
3118 it will cause chaos. */
3119 return 0;
3120
3121 --eh_region_count;
3122 break;
3123
3124 default:
3125 break;
3126 }
3127 }
3128 else if (past_to_p)
3129 /* If we've passed TO, and we see a non-note instruction, we
3130 can't extend the sequence to a movable sequence. */
3131 return 0;
3132
3133 if (r == to)
3134 {
3135 if (!new_to)
3136 /* It's OK to move the sequence if there were matched sets of
3137 exception region notes. */
3138 return eh_region_count == 0;
a6a2274a 3139
3140 past_to_p = 1;
3141 }
3142
3143 /* It's OK to move the sequence if there were matched sets of
3144 exception region notes. */
3145 if (past_to_p && eh_region_count == 0)
3146 {
3147 *new_to = r;
3148 return 1;
3149 }
3150
3151 /* Go to the next instruction. */
3152 r = NEXT_INSN (r);
3153 }
a6a2274a 3154
3155 return 0;
3156}
db7ba742 3157
f9da5064 3158/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 3159int
0c20a65f 3160loc_mentioned_in_p (rtx *loc, rtx in)
3161{
3162 enum rtx_code code = GET_CODE (in);
3163 const char *fmt = GET_RTX_FORMAT (code);
3164 int i, j;
3165
3166 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3167 {
e1de1560 3168 if (loc == &in->u.fld[i].rtx)
3169 return 1;
3170 if (fmt[i] == 'e')
3171 {
3172 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3173 return 1;
3174 }
3175 else if (fmt[i] == 'E')
3176 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3177 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3178 return 1;
3179 }
3180 return 0;
3181}
ddef6bc7 3182
3183/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3184 and SUBREG_BYTE, return the bit offset where the subreg begins
3185 (counting from the least significant bit of the operand). */
3186
3187unsigned int
3188subreg_lsb_1 (enum machine_mode outer_mode,
3189 enum machine_mode inner_mode,
3190 unsigned int subreg_byte)
33aceff2 3191{
3192 unsigned int bitpos;
3193 unsigned int byte;
3194 unsigned int word;
3195
3196 /* A paradoxical subreg begins at bit position 0. */
bb51e270 3197 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3198 return 0;
3199
3200 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3201 /* If the subreg crosses a word boundary ensure that
3202 it also begins and ends on a word boundary. */
3203 if ((subreg_byte % UNITS_PER_WORD
3204 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3205 && (subreg_byte % UNITS_PER_WORD
3206 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3207 abort ();
3208
3209 if (WORDS_BIG_ENDIAN)
3210 word = (GET_MODE_SIZE (inner_mode)
bb51e270 3211 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
33aceff2 3212 else
bb51e270 3213 word = subreg_byte / UNITS_PER_WORD;
3214 bitpos = word * BITS_PER_WORD;
3215
3216 if (BYTES_BIG_ENDIAN)
3217 byte = (GET_MODE_SIZE (inner_mode)
bb51e270 3218 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
33aceff2 3219 else
bb51e270 3220 byte = subreg_byte % UNITS_PER_WORD;
3221 bitpos += byte * BITS_PER_UNIT;
3222
3223 return bitpos;
3224}
3225
3226/* Given a subreg X, return the bit offset where the subreg begins
3227 (counting from the least significant bit of the reg). */
3228
3229unsigned int
3230subreg_lsb (rtx x)
3231{
3232 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3233 SUBREG_BYTE (x));
3234}
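
/* Worked example (assuming 4-byte words and an 8-byte DImode): for
   (subreg:SI (reg:DI R) 4) the formula above gives bit 32 on a
   little-endian target (word = 4/4 = 1) but bit 0 on a fully big-endian
   target (word = (8 - (4 + 4)) / 4 = 0), since SUBREG_BYTE counts in
   memory order while the result counts from the least significant bit.  */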
3235
3236/* This function returns the regno offset of a subreg expression.
3237 xregno - A regno of an inner hard subreg_reg (or what will become one).
3238 xmode - The mode of xregno.
3239 offset - The byte offset.
3240 ymode - The mode of a top level SUBREG (or what may become one).
5809eb5f 3241 RETURN - The regno offset which would be used. */
ddef6bc7 3242unsigned int
3243subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3244 unsigned int offset, enum machine_mode ymode)
ddef6bc7 3245{
3246 int nregs_xmode, nregs_ymode;
3247 int mode_multiple, nregs_multiple;
3248 int y_offset;
3249
3250 if (xregno >= FIRST_PSEUDO_REGISTER)
3251 abort ();
3252
3253 nregs_xmode = hard_regno_nregs[xregno][xmode];
3254 nregs_ymode = hard_regno_nregs[xregno][ymode];
3255
3256 /* If this is a big endian paradoxical subreg, which uses more actual
3257 hard registers than the original register, we must return a negative
3258 offset so that we find the proper highpart of the register. */
3259 if (offset == 0
3260 && nregs_ymode > nregs_xmode
3261 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3262 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3263 return nregs_xmode - nregs_ymode;
3264
3265 if (offset == 0 || nregs_xmode == nregs_ymode)
3266 return 0;
a6a2274a 3267
3268 /* size of ymode must not be greater than the size of xmode. */
3269 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3270 if (mode_multiple == 0)
3271 abort ();
3272
3273 y_offset = offset / GET_MODE_SIZE (ymode);
3274 nregs_multiple = nregs_xmode / nregs_ymode;
5809eb5f 3275 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3276}
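
/* Worked example (assuming hard register 10 is valid in DImode and that
   DImode occupies two SImode-sized hard registers): for
   subreg_regno_offset (10, DImode, 4, SImode) we get mode_multiple =
   8/4 = 2, y_offset = 4/4 = 1 and nregs_multiple = 2/1 = 2, so the
   result is (1 / (2/2)) * 1 = 1 and (subreg:SI (reg:DI 10) 4) refers to
   hard register 11, which is what subreg_regno below computes.  */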
3277
3278/* This function returns true when the offset is representable via
3279 subreg_offset in the given regno.
3280 xregno - A regno of an inner hard subreg_reg (or what will become one).
3281 xmode - The mode of xregno.
3282 offset - The byte offset.
3283 ymode - The mode of a top level SUBREG (or what may become one).
3284 RETURN - The regno offset which would be used. */
3285bool
3286subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3287 unsigned int offset, enum machine_mode ymode)
3288{
3289 int nregs_xmode, nregs_ymode;
3290 int mode_multiple, nregs_multiple;
3291 int y_offset;
3292
3293 if (xregno >= FIRST_PSEUDO_REGISTER)
3294 abort ();
3295
3296 nregs_xmode = hard_regno_nregs[xregno][xmode];
3297 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3298
a1105617 3299 /* Paradoxical subregs are always valid. */
3300 if (offset == 0
3301 && nregs_ymode > nregs_xmode
3302 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3303 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3304 return true;
3305
3306 /* Lowpart subregs are always valid. */
3307 if (offset == subreg_lowpart_offset (ymode, xmode))
3308 return true;
3309
3310#ifdef ENABLE_CHECKING
3311 /* This should always pass, otherwise we don't know how to verify the
3312 constraint. These conditions may be relaxed but subreg_offset would
3313 need to be redesigned. */
3314 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3315 || GET_MODE_SIZE (ymode) % nregs_ymode
3316 || nregs_xmode % nregs_ymode)
3317 abort ();
3318#endif
3319
b20b352b 3320 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3321 values. The subreg must represent a lowpart of given field.
04c5580f 3322 Compute what field it is. */
3323 offset -= subreg_lowpart_offset (ymode,
3324 mode_for_size (GET_MODE_BITSIZE (xmode)
3325 / nregs_xmode,
07015444 3326 MODE_INT, 0));
3327
3328 /* size of ymode must not be greater than the size of xmode. */
3329 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3330 if (mode_multiple == 0)
3331 abort ();
3332
3333 y_offset = offset / GET_MODE_SIZE (ymode);
3334 nregs_multiple = nregs_xmode / nregs_ymode;
3335#ifdef ENABLE_CHECKING
3336 if (offset % GET_MODE_SIZE (ymode)
3337 || mode_multiple % nregs_multiple)
3338 abort ();
3339#endif
3340 return (!(y_offset % (mode_multiple / nregs_multiple)));
3341}
3342
dc297297 3343/* Return the final regno that a subreg expression refers to. */
a6a2274a 3344unsigned int
0c20a65f 3345subreg_regno (rtx x)
3346{
3347 unsigned int ret;
3348 rtx subreg = SUBREG_REG (x);
3349 int regno = REGNO (subreg);
3350
3351 ret = regno + subreg_regno_offset (regno,
3352 GET_MODE (subreg),
3353 SUBREG_BYTE (x),
3354 GET_MODE (x));
3355 return ret;
3356
3357}
3358struct parms_set_data
3359{
3360 int nregs;
3361 HARD_REG_SET regs;
3362};
3363
3364/* Helper function for noticing stores to parameter registers. */
3365static void
0c20a65f 3366parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3367{
3368 struct parms_set_data *d = data;
3369 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3370 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3371 {
3372 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3373 d->nregs--;
3374 }
3375}
3376
a6a2274a 3377/* Look backward for the first parameter to be loaded.
3378 Do not skip BOUNDARY. */
3379rtx
0c20a65f 3380find_first_parameter_load (rtx call_insn, rtx boundary)
3381{
3382 struct parms_set_data parm;
3383 rtx p, before;
3384
3385 /* Since different machines initialize their parameter registers
3386 in different orders, assume nothing. Collect the set of all
3387 parameter registers. */
3388 CLEAR_HARD_REG_SET (parm.regs);
3389 parm.nregs = 0;
3390 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3391 if (GET_CODE (XEXP (p, 0)) == USE
f8cfc6aa 3392 && REG_P (XEXP (XEXP (p, 0), 0)))
3393 {
3394 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3395 abort ();
3396
3397 /* We only care about registers which can hold function
3398 arguments. */
3399 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3400 continue;
3401
3402 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3403 parm.nregs++;
3404 }
3405 before = call_insn;
3406
3407 /* Search backward for the first set of a register in this set. */
3408 while (parm.nregs && before != boundary)
3409 {
3410 before = PREV_INSN (before);
3411
3412 /* It is possible that some loads got CSEed from one call to
3413 another. Stop in that case. */
4b4bf941 3414 if (CALL_P (before))
3415 break;
3416
dbc1a163 3417 /* Our caller must either ensure that we will find all sets
833366d6 3418 (in case the code has not been optimized yet), or guard against
eaec9b3d 3419 possible labels by setting BOUNDARY to the preceding
833366d6 3420 CODE_LABEL. */
4b4bf941 3421 if (LABEL_P (before))
3422 {
3423 if (before != boundary)
3424 abort ();
3425 break;
3426 }
833366d6 3427
0d025d43 3428 if (INSN_P (before))
a6a2274a 3429 note_stores (PATTERN (before), parms_set, &parm);
3430 }
3431 return before;
3432}
3dec4024 3433
14b493d6 3434/* Return true if we should avoid inserting code between INSN and the
 3435 preceding call instruction. */
3436
3437bool
0c20a65f 3438keep_with_call_p (rtx insn)
3439{
3440 rtx set;
3441
3442 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3443 {
f8cfc6aa 3444 if (REG_P (SET_DEST (set))
5df533b3 3445 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3446 && fixed_regs[REGNO (SET_DEST (set))]
3447 && general_operand (SET_SRC (set), VOIDmode))
3448 return true;
f8cfc6aa 3449 if (REG_P (SET_SRC (set))
3dec4024 3450 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
f8cfc6aa 3451 && REG_P (SET_DEST (set))
3452 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3453 return true;
3454 /* There may be a stack pop just after the call and before the store
3455 of the return register. Search for the actual store when deciding
3456 if we can break or not. */
3457 if (SET_DEST (set) == stack_pointer_rtx)
3458 {
3459 rtx i2 = next_nonnote_insn (insn);
bc204393 3460 if (i2 && keep_with_call_p (i2))
3461 return true;
3462 }
3463 }
3464 return false;
3465}
3466
3467/* Return true when store to register X can be hoisted to the place
3468 with LIVE registers (can be NULL). Value VAL contains destination
3469 whose value will be used. */
3470
3471static bool
0c20a65f 3472hoist_test_store (rtx x, rtx val, regset live)
3473{
3474 if (GET_CODE (x) == SCRATCH)
3475 return true;
3476
3477 if (rtx_equal_p (x, val))
3478 return true;
3479
e0bb17a8 3480 /* Allow a SUBREG of X unless it writes just part of a multi-register
 3481 pseudo; then we would need to update all users to account for the hoisted
 3482 store too. The caller may represent that by passing the whole SUBREG as VAL. */
3483
3484 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3485 {
3486 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3487 && GET_MODE_BITSIZE (GET_MODE (x)) <
3488 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3489 return false;
3490 return true;
3491 }
3492 if (GET_CODE (x) == SUBREG)
3493 x = SUBREG_REG (x);
3494
3495 /* Anything other than a register store is not hoistable. This includes
 3496 partial stores to registers. */
3497
3498 if (!REG_P (x))
3499 return false;
3500
e0bb17a8 3501 /* Pseudo registers can always be replaced by another pseudo to avoid
 3502 the side effect; for hard registers we must ensure that they are dead.
 3503 Eventually we may want to add code to try to turn pseudos into hard
3d042e77 3504 registers, but it is unlikely to be useful. */
3505
3506 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3507 {
3508 int regno = REGNO (x);
66fd46b6 3509 int n = hard_regno_nregs[regno][GET_MODE (x)];
3510
3511 if (!live)
3512 return false;
3513 if (REGNO_REG_SET_P (live, regno))
3514 return false;
3515 while (--n > 0)
3516 if (REGNO_REG_SET_P (live, regno + n))
3517 return false;
3518 }
3519 return true;
3520}
3521
3522
3523/* Return true if INSN can be hoisted to place with LIVE hard registers
3524 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3525 and used by the hoisting pass. */
3526
3527bool
0c20a65f 3528can_hoist_insn_p (rtx insn, rtx val, regset live)
3529{
3530 rtx pat = PATTERN (insn);
3531 int i;
3532
3533 /* It is probably not worth the complexity to handle multiple
 3534 set stores. */
3535 if (!single_set (insn))
3536 return false;
3537 /* We could move a CALL_INSN, but we would need to check that all
 3538 caller-clobbered regs are dead first. */
4b4bf941 3539 if (CALL_P (insn))
3540 return false;
3541 /* In future we will handle hoisting of libcall sequences, but
3542 give up for now. */
3543 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3544 return false;
3545 switch (GET_CODE (pat))
3546 {
3547 case SET:
3548 if (!hoist_test_store (SET_DEST (pat), val, live))
3549 return false;
3550 break;
3551 case USE:
3552 /* USEs have tricky semantics, so do not move them. */
3553 return false;
3554 break;
3555 case CLOBBER:
3556 if (!hoist_test_store (XEXP (pat, 0), val, live))
3557 return false;
3558 break;
3559 case PARALLEL:
3560 for (i = 0; i < XVECLEN (pat, 0); i++)
3561 {
3562 rtx x = XVECEXP (pat, 0, i);
3563 switch (GET_CODE (x))
3564 {
3565 case SET:
3566 if (!hoist_test_store (SET_DEST (x), val, live))
3567 return false;
3568 break;
3569 case USE:
3570 /* We would need to fix callers to really guarantee availability
4d6922ee 3571 of all values the insn uses, but for now it is safe to prohibit
14b493d6 3572 hoisting of any insn having such hidden uses. */
71d2c5bd
JH
3573 return false;
3574 break;
3575 case CLOBBER:
3576 if (!hoist_test_store (SET_DEST (x), val, live))
3577 return false;
3578 break;
3579 default:
3580 break;
3581 }
3582 }
3583 break;
3584 default:
3585 abort ();
3586 }
3587 return true;
3588}
3589
3590/* Update a store after hoisting: replace all stores to pseudo registers
 3591 by new ones to avoid clobbering values, except for the store to VAL,
 3592 which will be updated to NEW. */
3593
3594static void
0c20a65f 3595hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3596{
3597 rtx x = *xp;
3598
3599 if (GET_CODE (x) == SCRATCH)
3600 return;
3601
3602 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3603 validate_change (insn, xp,
3604 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3605 SUBREG_BYTE (x)), 1);
3606 if (rtx_equal_p (x, val))
3607 {
3608 validate_change (insn, xp, new, 1);
3609 return;
3610 }
3611 if (GET_CODE (x) == SUBREG)
3612 {
3613 xp = &SUBREG_REG (x);
3614 x = *xp;
3615 }
3616
3617 if (!REG_P (x))
3618 abort ();
3619
3620 /* We've verified that hard registers are dead, so we may keep the side
3621 effect. Otherwise replace it by new pseudo. */
3622 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3623 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3624 REG_NOTES (insn)
3625 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3626}
3627
3628/* Create a copy of INSN after AFTER, replacing the store of VAL with a store
 3629 to NEW and each other side effect on a pseudo register with a fresh pseudo. */
3630
3631rtx
0c20a65f 3632hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3633{
3634 rtx pat;
3635 int i;
3636 rtx note;
3637
3638 insn = emit_copy_of_insn_after (insn, after);
3639 pat = PATTERN (insn);
3640
3641 /* Remove REG_UNUSED notes as we will re-emit them. */
3642 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3643 remove_note (insn, note);
3644
3645 /* To get this working, callers must also move everything referenced by
 3646 REG_EQUAL/REG_EQUIV notes. Let's just remove the notes; it is probably
 3647 easier. */
3648 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3649 remove_note (insn, note);
3650 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3651 remove_note (insn, note);
3652
3653 /* Remove REG_DEAD notes as they might not be valid anymore in case
3654 we create redundancy. */
3655 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3656 remove_note (insn, note);
3657 switch (GET_CODE (pat))
3658 {
3659 case SET:
3660 hoist_update_store (insn, &SET_DEST (pat), val, new);
3661 break;
3662 case USE:
3663 break;
3664 case CLOBBER:
3665 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3666 break;
3667 case PARALLEL:
3668 for (i = 0; i < XVECLEN (pat, 0); i++)
3669 {
3670 rtx x = XVECEXP (pat, 0, i);
3671 switch (GET_CODE (x))
3672 {
3673 case SET:
3674 hoist_update_store (insn, &SET_DEST (x), val, new);
3675 break;
3676 case USE:
3677 break;
3678 case CLOBBER:
3679 hoist_update_store (insn, &SET_DEST (x), val, new);
3680 break;
3681 default:
3682 break;
3683 }
3684 }
3685 break;
3686 default:
3687 abort ();
3688 }
3689 if (!apply_change_group ())
3690 abort ();
3691
3692 return insn;
3693}
3694
3695rtx
0c20a65f 3696hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3697{
3698 rtx new_insn;
3699
3700 /* We cannot insert instructions on an abnormal critical edge.
3701 It will be easier to find the culprit if we die now. */
3702 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3703 abort ();
3704
3705 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
 3706 stuff. We also emit CALL_INSNs and friends. */
3707 if (e->insns.r == NULL_RTX)
3708 {
3709 start_sequence ();
2e040219 3710 emit_note (NOTE_INSN_DELETED);
3711 }
3712 else
6de9cd9a 3713 push_to_sequence (e->insns.r);
3714
3715 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3716
6de9cd9a 3717 e->insns.r = get_insns ();
3718 end_sequence ();
3719 return new_insn;
3720}
3721
3722/* Return true if LABEL is a target of JUMP_INSN. This applies only
3723 to non-complex jumps. That is, direct unconditional, conditional,
3724 and tablejumps, but not computed jumps or returns. It also does
3725 not apply to the fallthru case of a conditional jump. */
3726
3727bool
3728label_is_jump_target_p (rtx label, rtx jump_insn)
3729{
3730 rtx tmp = JUMP_LABEL (jump_insn);
3731
3732 if (label == tmp)
3733 return true;
3734
3735 if (tablejump_p (jump_insn, NULL, &tmp))
3736 {
3737 rtvec vec = XVEC (PATTERN (tmp),
3738 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3739 int i, veclen = GET_NUM_ELEM (vec);
3740
3741 for (i = 0; i < veclen; ++i)
3742 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3743 return true;
3744 }
3745
3746 return false;
3747}
3748
3749\f
3750/* Return an estimate of the cost of computing rtx X.
3751 One use is in cse, to decide which expression to keep in the hash table.
3752 Another is in rtl generation, to pick the cheapest way to multiply.
3753 Other uses like the latter are expected in the future. */
3754
3755int
3756rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3757{
3758 int i, j;
3759 enum rtx_code code;
3760 const char *fmt;
3761 int total;
3762
3763 if (x == 0)
3764 return 0;
3765
3766 /* Compute the default costs of certain things.
3767 Note that targetm.rtx_costs can override the defaults. */
3768
3769 code = GET_CODE (x);
3770 switch (code)
3771 {
3772 case MULT:
3773 total = COSTS_N_INSNS (5);
3774 break;
3775 case DIV:
3776 case UDIV:
3777 case MOD:
3778 case UMOD:
3779 total = COSTS_N_INSNS (7);
3780 break;
3781 case USE:
3782 /* Used in loop.c and combine.c as a marker. */
3783 total = 0;
3784 break;
3785 default:
3786 total = COSTS_N_INSNS (1);
3787 }
3788
3789 switch (code)
3790 {
3791 case REG:
3792 return 0;
3793
3794 case SUBREG:
3795 /* If we can't tie these modes, make this expensive. The larger
3796 the mode, the more expensive it is. */
3797 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3798 return COSTS_N_INSNS (2
3799 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3800 break;
3801
3802 default:
5fd9b178 3803 if (targetm.rtx_costs (x, code, outer_code, &total))
3804 return total;
3805 break;
3806 }
3807
3808 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3809 which is already in total. */
3810
3811 fmt = GET_RTX_FORMAT (code);
3812 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3813 if (fmt[i] == 'e')
3814 total += rtx_cost (XEXP (x, i), code);
3815 else if (fmt[i] == 'E')
3816 for (j = 0; j < XVECLEN (x, i); j++)
3817 total += rtx_cost (XVECEXP (x, i, j), code);
3818
3819 return total;
3820}
3821\f
3822/* Return the cost of address expression X.
 3823 X is expected to be a properly formed address reference. */
3824
3825int
3826address_cost (rtx x, enum machine_mode mode)
3827{
3828 /* We may be asked for cost of various unusual addresses, such as operands
3829 of push instruction. It is not worthwhile to complicate writing
3830 of the target hook by such cases. */
3831
3832 if (!memory_address_p (mode, x))
3833 return 1000;
3834
5fd9b178 3835 return targetm.address_cost (x);
3836}
3837
3838/* If the target doesn't override, compute the cost as with arithmetic. */
3839
3840int
3841default_address_cost (rtx x)
3842{
3843 return rtx_cost (x, MEM);
3844}
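
/* Usage sketch, not from this file: choose the cheaper of two candidate
   addresses for a MEM in MODE; ties keep the first form.  */

static rtx
cheaper_address (rtx a, rtx b, enum machine_mode mode)
{
  return address_cost (b, mode) < address_cost (a, mode) ? b : a;
}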
3845\f
3846
3847unsigned HOST_WIDE_INT
3848nonzero_bits (rtx x, enum machine_mode mode)
3849{
3850 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3851}
3852
3853unsigned int
3854num_sign_bit_copies (rtx x, enum machine_mode mode)
3855{
3856 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3857}
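
/* A small consumer sketch (assumed, not from this file): if every bit
   of X outside the low eight is known to be zero, then
   (and X (const_int 255)) is redundant.  */

static int
and_255_redundant_p (rtx x)
{
  return (nonzero_bits (x, GET_MODE (x))
	  & ~(unsigned HOST_WIDE_INT) 0xff) == 0;
}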
3858
3859/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3860 It avoids exponential behavior in nonzero_bits1 when X has
3861 identical subexpressions on the first or the second level. */
3862
3863static unsigned HOST_WIDE_INT
3864cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3865 enum machine_mode known_mode,
3866 unsigned HOST_WIDE_INT known_ret)
3867{
3868 if (x == known_x && mode == known_mode)
3869 return known_ret;
3870
3871 /* Try to find identical subexpressions. If found call
3872 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3873 precomputed value for the subexpression as KNOWN_RET. */
3874
3875 if (ARITHMETIC_P (x))
3876 {
3877 rtx x0 = XEXP (x, 0);
3878 rtx x1 = XEXP (x, 1);
3879
3880 /* Check the first level. */
3881 if (x0 == x1)
3882 return nonzero_bits1 (x, mode, x0, mode,
3883 cached_nonzero_bits (x0, mode, known_x,
3884 known_mode, known_ret));
3885
3886 /* Check the second level. */
3887 if (ARITHMETIC_P (x0)
3888 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3889 return nonzero_bits1 (x, mode, x1, mode,
3890 cached_nonzero_bits (x1, mode, known_x,
3891 known_mode, known_ret));
3892
3893 if (ARITHMETIC_P (x1)
3894 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3895 return nonzero_bits1 (x, mode, x0, mode,
3896 cached_nonzero_bits (x0, mode, known_x,
3897 known_mode, known_ret));
3898 }
3899
3900 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3901}
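
/* A self-contained sketch of the sharing trick used above, with
   invented toy_* names: when both operands of a node are the same
   subtree, the subtree's result is computed once and passed down as
   KNOWN_X/KNOWN_RET, so a chain of such nodes is walked in linear
   rather than exponential time.  */

#include <stdio.h>

struct toy_node
{
  struct toy_node *op0, *op1;	/* NULL for leaves.  */
  unsigned leaf_bits;
};

static unsigned long toy_calls;

static unsigned
toy_nonzero (struct toy_node *x, struct toy_node *known_x, unsigned known_ret)
{
  toy_calls++;

  if (x == known_x)
    return known_ret;		/* Precomputed result; stop recursing.  */

  if (x->op0 == NULL)
    return x->leaf_bits;

  if (x->op0 == x->op1)
    {
      /* Shared subtree: evaluate it once, then short-circuit both uses.  */
      unsigned sub = toy_nonzero (x->op0, known_x, known_ret);
      return toy_nonzero (x->op0, x->op0, sub)
	     | toy_nonzero (x->op1, x->op0, sub);
    }

  return toy_nonzero (x->op0, known_x, known_ret)
	 | toy_nonzero (x->op1, known_x, known_ret);
}

int
main (void)
{
  enum { DEPTH = 30 };
  struct toy_node pool[DEPTH + 1];
  int i;

  pool[0].op0 = pool[0].op1 = NULL;
  pool[0].leaf_bits = 0xff;
  for (i = 1; i <= DEPTH; i++)
    {
      pool[i].op0 = pool[i].op1 = &pool[i - 1];
      pool[i].leaf_bits = 0;
    }

  /* Prints "0xff in 91 calls"; a naive walk would make 2^31 - 1 calls.  */
  printf ("0x%x in %lu calls\n",
	  toy_nonzero (&pool[DEPTH], NULL, 0), toy_calls);
  return 0;
}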
3902
3903/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3904 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3905 is less useful. We can't allow both, because that results in exponential
3906 run time recursion. There is a nullstone testcase that triggered
3907 this. This macro avoids accidental uses of num_sign_bit_copies. */
3908#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
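/* While the macro is live, any stray use of cached_num_sign_bit_copies
   below refers to an undeclared identifier, so the mistake shows up at
   compile or link time.  */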
3909
3910/* Given an expression, X, compute which bits in X can be nonzero.
3911 We don't care about bits outside of those defined in MODE.
3912
3913 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
3914 an arithmetic operation, we can do better. */
3915
3916static unsigned HOST_WIDE_INT
3917nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3918 enum machine_mode known_mode,
3919 unsigned HOST_WIDE_INT known_ret)
3920{
3921 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3922 unsigned HOST_WIDE_INT inner_nz;
3923 enum rtx_code code;
3924 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3925
3926 /* For floating-point values, assume all bits are needed. */
3927 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3928 return nonzero;
3929
3930 /* If X is wider than MODE, use its mode instead. */
3931 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3932 {
3933 mode = GET_MODE (x);
3934 nonzero = GET_MODE_MASK (mode);
3935 mode_width = GET_MODE_BITSIZE (mode);
3936 }
3937
3938 if (mode_width > HOST_BITS_PER_WIDE_INT)
3939 /* Our only callers in this case look for single bit values. So
3940 just return the mode mask. Those tests will then be false. */
3941 return nonzero;
3942
3943#ifndef WORD_REGISTER_OPERATIONS
3944 /* If MODE is wider than X, but both are a single word for both the host
3945 and target machines, we can compute this from which bits of the
3946 object might be nonzero in its own mode, taking into account the fact
3947 that on many CISC machines, accessing an object in a wider mode
3948 causes the high-order bits to become undefined. So they are
3949 not known to be zero. */
3950
3951 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3952 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3953 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3954 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3955 {
3956 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3957 known_x, known_mode, known_ret);
3958 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3959 return nonzero;
3960 }
3961#endif
3962
3963 code = GET_CODE (x);
3964 switch (code)
3965 {
3966 case REG:
3967#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3968 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3969 all the bits above ptr_mode are known to be zero. */
3970 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3971 && REG_POINTER (x))
3972 nonzero &= GET_MODE_MASK (ptr_mode);
3973#endif
3974
3975 /* Include declared information about alignment of pointers. */
3976 /* ??? We don't properly preserve REG_POINTER changes across
3977 pointer-to-integer casts, so we can't trust it except for
3978 things that we know must be pointers. See execute/960116-1.c. */
3979 if ((x == stack_pointer_rtx
3980 || x == frame_pointer_rtx
3981 || x == arg_pointer_rtx)
3982 && REGNO_POINTER_ALIGN (REGNO (x)))
3983 {
3984 unsigned HOST_WIDE_INT alignment
3985 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3986
3987#ifdef PUSH_ROUNDING
3988 /* If PUSH_ROUNDING is defined, it is possible for the
3989 stack to be momentarily aligned only to that amount,
3990 so we pick the least alignment. */
3991 if (x == stack_pointer_rtx && PUSH_ARGS)
3992 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3993 alignment);
3994#endif
3995
3996 nonzero &= ~(alignment - 1);
3997 }
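      /* E.g. a stack pointer known to be 16-byte aligned gives
	 ALIGNMENT == 16, so the low four bits are cleared from
	 NONZERO here.  */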
3998
3999 {
4000 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4001 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4002 known_mode, known_ret,
4003 &nonzero_for_hook);
4004
4005 if (new)
4006 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
4007 known_mode, known_ret);
4008
4009 return nonzero_for_hook;
4010 }
4011
4012 case CONST_INT:
4013#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4014 /* If X is positive on the host but negative in MODE, sign-extend the value. */
4015 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
4016 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
4017 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
4018#endif
4019
4020 return INTVAL (x);
4021
4022 case MEM:
4023#ifdef LOAD_EXTEND_OP
4024 /* On many, if not most, RISC machines, reading a byte from memory
4025 zeros the rest of the register. Noticing that fact saves a lot
4026 of extra zero-extends. */
4027 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4028 nonzero &= GET_MODE_MASK (GET_MODE (x));
4029#endif
4030 break;
4031
4032 case EQ: case NE:
4033 case UNEQ: case LTGT:
4034 case GT: case GTU: case UNGT:
4035 case LT: case LTU: case UNLT:
4036 case GE: case GEU: case UNGE:
4037 case LE: case LEU: case UNLE:
4038 case UNORDERED: case ORDERED:
4039
4040 /* If this produces an integer result, we know which bits are set.
4041 Code here used to clear bits outside the mode of X, but that is
4042 now done above. */
4043
4044 if (GET_MODE_CLASS (mode) == MODE_INT
4045 && mode_width <= HOST_BITS_PER_WIDE_INT)
4046 nonzero = STORE_FLAG_VALUE;
4047 break;
4048
4049 case NEG:
4050#if 0
4051 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4052 and num_sign_bit_copies. */
4053 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4054 == GET_MODE_BITSIZE (GET_MODE (x)))
4055 nonzero = 1;
4056#endif
4057
4058 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
4059 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4060 break;
4061
4062 case ABS:
4063#if 0
4064 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4065 and num_sign_bit_copies. */
4066 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4067 == GET_MODE_BITSIZE (GET_MODE (x)))
4068 nonzero = 1;
4069#endif
4070 break;
4071
4072 case TRUNCATE:
4073 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4074 known_x, known_mode, known_ret)
4075 & GET_MODE_MASK (mode));
4076 break;
4077
4078 case ZERO_EXTEND:
4079 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4080 known_x, known_mode, known_ret);
4081 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4082 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4083 break;
4084
4085 case SIGN_EXTEND:
4086 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4087 Otherwise, show all the bits in the outer mode but not the inner
4088 may be nonzero. */
4089 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4090 known_x, known_mode, known_ret);
4091 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4092 {
4093 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4094 if (inner_nz
4095 & (((HOST_WIDE_INT) 1
4096 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
4097 inner_nz |= (GET_MODE_MASK (mode)
4098 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4099 }
4100
4101 nonzero &= inner_nz;
4102 break;
4103
4104 case AND:
4105 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4106 known_x, known_mode, known_ret)
4107 & cached_nonzero_bits (XEXP (x, 1), mode,
4108 known_x, known_mode, known_ret);
4109 break;
4110
4111 case XOR: case IOR:
4112 case UMIN: case UMAX: case SMIN: case SMAX:
4113 {
4114 unsigned HOST_WIDE_INT nonzero0 =
4115 cached_nonzero_bits (XEXP (x, 0), mode,
4116 known_x, known_mode, known_ret);
4117
4118 /* Don't call nonzero_bits for the second time if it cannot change
4119 anything. */
4120 if ((nonzero & nonzero0) != nonzero)
4121 nonzero &= nonzero0
4122 | cached_nonzero_bits (XEXP (x, 1), mode,
4123 known_x, known_mode, known_ret);
4124 }
4125 break;
4126
4127 case PLUS: case MINUS:
4128 case MULT:
4129 case DIV: case UDIV:
4130 case MOD: case UMOD:
4131 /* We can apply the rules of arithmetic to compute the number of
4132 high- and low-order zero bits of these operations. We start by
4133 computing the width (one more than the position of the highest-order
4134 nonzero bit) and the number of low-order zero bits for each value. */
4135 {
4136 unsigned HOST_WIDE_INT nz0 =
4137 cached_nonzero_bits (XEXP (x, 0), mode,
4138 known_x, known_mode, known_ret);
4139 unsigned HOST_WIDE_INT nz1 =
4140 cached_nonzero_bits (XEXP (x, 1), mode,
4141 known_x, known_mode, known_ret);
4142 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
4143 int width0 = floor_log2 (nz0) + 1;
4144 int width1 = floor_log2 (nz1) + 1;
4145 int low0 = floor_log2 (nz0 & -nz0);
4146 int low1 = floor_log2 (nz1 & -nz1);
4147 HOST_WIDE_INT op0_maybe_minusp
4148 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
4149 HOST_WIDE_INT op1_maybe_minusp
4150 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
4151 unsigned int result_width = mode_width;
4152 int result_low = 0;
4153
4154 switch (code)
4155 {
4156 case PLUS:
4157 result_width = MAX (width0, width1) + 1;
4158 result_low = MIN (low0, low1);
4159 break;
4160 case MINUS:
4161 result_low = MIN (low0, low1);
4162 break;
4163 case MULT:
4164 result_width = width0 + width1;
4165 result_low = low0 + low1;
4166 break;
4167 case DIV:
4168 if (width1 == 0)
4169 break;
4170 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4171 result_width = width0;
4172 break;
4173 case UDIV:
4174 if (width1 == 0)
4175 break;
4176 result_width = width0;
4177 break;
4178 case MOD:
4179 if (width1 == 0)
4180 break;
4181 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4182 result_width = MIN (width0, width1);
4183 result_low = MIN (low0, low1);
4184 break;
4185 case UMOD:
4186 if (width1 == 0)
4187 break;
4188 result_width = MIN (width0, width1);
4189 result_low = MIN (low0, low1);
4190 break;
4191 default:
4192 abort ();
4193 }
4194
4195 if (result_width < mode_width)
4196 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
4197
4198 if (result_low > 0)
4199 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
4200
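	/* Worked example: if the operands' masks are 0xf0 and 0x30 under
	   PLUS, then width0 = 8, width1 = 6 and low0 = low1 = 4, so the
	   sum has at most 9 significant bits with the low four clear and
	   NONZERO is narrowed to 0x1f0.  */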
4201#ifdef POINTERS_EXTEND_UNSIGNED
4202 /* If pointers extend unsigned and this is an addition or subtraction
4203 to a pointer in Pmode, all the bits above ptr_mode are known to be
4204 zero. */
4205 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4206 && (code == PLUS || code == MINUS)
f8cfc6aa 4207 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4208 nonzero &= GET_MODE_MASK (ptr_mode);
4209#endif
4210 }
4211 break;
4212
4213 case ZERO_EXTRACT:
4214 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4215 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4216 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4217 break;
4218
4219 case SUBREG:
4220 /* If this is a SUBREG formed for a promoted variable that has
4221 been zero-extended, we know that at least the high-order bits
4222 are zero, though others might be too. */
4223
4224 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4225 nonzero = GET_MODE_MASK (GET_MODE (x))
4226 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4227 known_x, known_mode, known_ret);
4228
4229 /* If the inner mode is a single word for both the host and target
4230 machines, we can compute this from which bits of the inner
4231 object might be nonzero. */
4232 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4233 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4234 <= HOST_BITS_PER_WIDE_INT))
4235 {
4236 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4237 known_x, known_mode, known_ret);
4238
4239#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4240 /* If this is a typical RISC machine, we only have to worry
4241 about the way loads are extended. */
4242 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4243 ? (((nonzero
4244 & (((unsigned HOST_WIDE_INT) 1
4245 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4246 != 0))
4247 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3c0cb5de 4248 || !MEM_P (SUBREG_REG (x)))
4249#endif
4250 {
4251 /* On many CISC machines, accessing an object in a wider mode
4252 causes the high-order bits to become undefined. So they are
4253 not known to be zero. */
4254 if (GET_MODE_SIZE (GET_MODE (x))
4255 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4256 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4257 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4258 }
4259 }
4260 break;
4261
4262 case ASHIFTRT:
4263 case LSHIFTRT:
4264 case ASHIFT:
4265 case ROTATE:
4266 /* The nonzero bits are in two classes: any bits within MODE
4267 that aren't in GET_MODE (x) are always significant. The rest of the
4268 nonzero bits are those that are significant in the operand of
4269 the shift when shifted the appropriate number of bits. This
4270 shows that high-order bits are cleared by the right shift and
4271 low-order bits by left shifts. */
4272 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4273 && INTVAL (XEXP (x, 1)) >= 0
4274 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4275 {
4276 enum machine_mode inner_mode = GET_MODE (x);
4277 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4278 int count = INTVAL (XEXP (x, 1));
4279 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4280 unsigned HOST_WIDE_INT op_nonzero =
4281 cached_nonzero_bits (XEXP (x, 0), mode,
4282 known_x, known_mode, known_ret);
4283 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4284 unsigned HOST_WIDE_INT outer = 0;
4285
4286 if (mode_width > width)
4287 outer = (op_nonzero & nonzero & ~mode_mask);
4288
4289 if (code == LSHIFTRT)
4290 inner >>= count;
4291 else if (code == ASHIFTRT)
4292 {
4293 inner >>= count;
4294
4295 /* If the sign bit may have been nonzero before the shift, we
4296 need to mark all the places it could have been copied to
4297 by the shift as possibly nonzero. */
4298 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4299 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4300 }
4301 else if (code == ASHIFT)
4302 inner <<= count;
4303 else
4304 inner = ((inner << (count % width)
4305 | (inner >> (width - (count % width)))) & mode_mask);
4306
4307 nonzero &= (outer | inner);
4308 }
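      /* E.g. an 8-bit ASHIFTRT by 2 with operand mask 0x80: INNER
	 becomes 0x20, and since that bit came from the sign bit the
	 two vacated high bits are added back, giving 0xe0.  */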
4309 break;
4310
4311 case FFS:
4312 case POPCOUNT:
4313 /* This is at most the number of bits in the mode. */
4314 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4315 break;
4316
4317 case CLZ:
4318 /* If CLZ has a known value at zero, then the nonzero bits are
4319 that value, plus the number of bits in the mode minus one. */
4320 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4321 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4322 else
4323 nonzero = -1;
4324 break;
4325
4326 case CTZ:
4327 /* If CTZ has a known value at zero, then the nonzero bits are
4328 that value, plus the number of bits in the mode minus one. */
4329 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4330 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4331 else
4332 nonzero = -1;
4333 break;
4334
4335 case PARITY:
4336 nonzero = 1;
4337 break;
4338
4339 case IF_THEN_ELSE:
4340 {
4341 unsigned HOST_WIDE_INT nonzero_true =
4342 cached_nonzero_bits (XEXP (x, 1), mode,
4343 known_x, known_mode, known_ret);
4344
4345 /* Don't call nonzero_bits for the second time if it cannot change
4346 anything. */
4347 if ((nonzero & nonzero_true) != nonzero)
4348 nonzero &= nonzero_true
4349 | cached_nonzero_bits (XEXP (x, 2), mode,
4350 known_x, known_mode, known_ret);
4351 }
4352 break;
4353
4354 default:
4355 break;
4356 }
4357
4358 return nonzero;
4359}
4360
4361/* See the macro definition above. */
4362#undef cached_num_sign_bit_copies
4363
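/* A tiny standalone analogue of the mask propagation above for a few
   operators, outside GCC; the toy_nz_* names are invented and 32-bit
   masks stand in for modes.  */

#include <stdio.h>
#include <stdint.h>

static uint32_t toy_nz_and (uint32_t a, uint32_t b) { return a & b; }
static uint32_t toy_nz_ashift (uint32_t a, int n) { return a << n; }
static uint32_t toy_nz_lshiftrt (uint32_t a, int n) { return a >> n; }

int
main (void)
{
  uint32_t x = 0xffffffff;	/* Nothing known about x itself.  */
  uint32_t nz;

  /* ((x & 0xff) << 4) can only have bits 4..11 set, so the value is
     known to be below 0x1000 and a multiple of 16.  */
  nz = toy_nz_ashift (toy_nz_and (x, 0xff), 4);
  printf ("0x%x\n", nz);	/* Prints 0xff0.  */

  /* Right shifts clear high bits instead: 0xff0 >> 8 gives 0xf.  */
  printf ("0x%x\n", toy_nz_lshiftrt (nz, 8));
  return 0;
}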
4364\f
4365/* The function cached_num_sign_bit_copies is a wrapper around
4366 num_sign_bit_copies1. It avoids exponential behavior in
4367 num_sign_bit_copies1 when X has identical subexpressions on the
4368 first or the second level. */
4369
4370static unsigned int
4371cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4372 enum machine_mode known_mode,
4373 unsigned int known_ret)
4374{
4375 if (x == known_x && mode == known_mode)
4376 return known_ret;
4377
4378 /* Try to find identical subexpressions. If found call
4379 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4380 the precomputed value for the subexpression as KNOWN_RET. */
4381
4382 if (ARITHMETIC_P (x))
4383 {
4384 rtx x0 = XEXP (x, 0);
4385 rtx x1 = XEXP (x, 1);
4386
4387 /* Check the first level. */
4388 if (x0 == x1)
4389 return
4390 num_sign_bit_copies1 (x, mode, x0, mode,
4391 cached_num_sign_bit_copies (x0, mode, known_x,
4392 known_mode,
4393 known_ret));
4394
4395 /* Check the second level. */
4396 if (ARITHMETIC_P (x0)
4397 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4398 return
4399 num_sign_bit_copies1 (x, mode, x1, mode,
4400 cached_num_sign_bit_copies (x1, mode, known_x,
4401 known_mode,
4402 known_ret));
4403
4404 if (ARITHMETIC_P (x1)
4405 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4406 return
4407 num_sign_bit_copies1 (x, mode, x0, mode,
4408 cached_num_sign_bit_copies (x0, mode, known_x,
4409 known_mode,
4410 known_ret));
4411 }
4412
4413 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4414}
4415
4416/* Return the number of bits at the high-order end of X that are known to
4417 be equal to the sign bit. X will be used in mode MODE; if MODE is
4418 VOIDmode, X will be used in its own mode. The returned value will always
4419 be between 1 and the number of bits in MODE. */
4420
4421static unsigned int
4422num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4423 enum machine_mode known_mode,
4424 unsigned int known_ret)
4425{
4426 enum rtx_code code = GET_CODE (x);
4427 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4428 int num0, num1, result;
4429 unsigned HOST_WIDE_INT nonzero;
4430
4431 /* If we weren't given a mode, use the mode of X. If the mode is still
4432 VOIDmode, we don't know anything. Likewise if one of the modes is
4433 floating-point. */
4434
4435 if (mode == VOIDmode)
4436 mode = GET_MODE (x);
4437
4438 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4439 return 1;
4440
4441 /* For a smaller object, just ignore the high bits. */
4442 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4443 {
4444 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4445 known_x, known_mode, known_ret);
4446 return MAX (1,
4447 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4448 }
4449
4450 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4451 {
4452#ifndef WORD_REGISTER_OPERATIONS
4453 /* If this machine does not do all register operations on the entire
4454 register and MODE is wider than the mode of X, we can say nothing
4455 at all about the high-order bits. */
4456 return 1;
4457#else
4458 /* Likewise on machines that do, if the mode of the object is smaller
4459 than a word and loads of that size don't sign extend, we can say
4460 nothing about the high order bits. */
4461 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4462#ifdef LOAD_EXTEND_OP
4463 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4464#endif
4465 )
4466 return 1;
4467#endif
4468 }
4469
4470 switch (code)
4471 {
4472 case REG:
4473
4474#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4475 /* If pointers extend signed and this is a pointer in Pmode, say that
4476 all the bits above ptr_mode are known to be sign bit copies. */
4477 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4478 && REG_POINTER (x))
4479 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4480#endif
4481
4482 {
4483 unsigned int copies_for_hook = 1, copies = 1;
4484 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4485 known_mode, known_ret,
4486 &copies_for_hook);
4487
4488 if (new)
4489 copies = cached_num_sign_bit_copies (new, mode, known_x,
4490 known_mode, known_ret);
4491
4492 if (copies > 1 || copies_for_hook > 1)
4493 return MAX (copies, copies_for_hook);
4494
4495 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4496 }
4497 break;
4498
4499 case MEM:
4500#ifdef LOAD_EXTEND_OP
4501 /* Some RISC machines sign-extend all loads smaller than a word. */
4502 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4503 return MAX (1, ((int) bitwidth
4504 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4505#endif
4506 break;
4507
4508 case CONST_INT:
4509 /* If the constant is negative, take its 1's complement and remask.
4510 Then see how many zero bits we have. */
4511 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4512 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4513 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4514 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4515
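      /* E.g. in a 32-bit mode, -4 is 0xfffffffc; its complement under the
	 mask is 3, floor_log2 (3) is 1, so this returns 32 - 1 - 1 = 30,
	 matching the thirty leading ones of the representation.  */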
4516 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4517
4518 case SUBREG:
4519 /* If this is a SUBREG for a promoted object that is sign-extended
4520 and we are looking at it in a wider mode, we know that at least the
4521 high-order bits are known to be sign bit copies. */
4522
4523 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4524 {
4525 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4526 known_x, known_mode, known_ret);
4527 return MAX ((int) bitwidth
4528 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4529 num0);
4530 }
4531
4532 /* For a smaller object, just ignore the high bits. */
4533 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4534 {
4535 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4536 known_x, known_mode, known_ret);
4537 return MAX (1, (num0
4538 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4539 - bitwidth)));
4540 }
4541
4542#ifdef WORD_REGISTER_OPERATIONS
4543#ifdef LOAD_EXTEND_OP
4544 /* For paradoxical SUBREGs on machines where all register operations
4545 affect the entire register, just look inside. Note that we are
4546 passing MODE to the recursive call, so the number of sign bit copies
4547 will remain relative to that mode, not the inner mode. */
4548
4549 /* This works only if loads sign extend. Otherwise, if we get a
4550 reload for the inner part, it may be loaded from the stack, and
4551 then we lose all sign bit copies that existed before the store
4552 to the stack. */
4553
4554 if ((GET_MODE_SIZE (GET_MODE (x))
4555 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4556 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3c0cb5de 4557 && MEM_P (SUBREG_REG (x)))
4558 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4559 known_x, known_mode, known_ret);
4560#endif
4561#endif
4562 break;
4563
4564 case SIGN_EXTRACT:
4565 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4566 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4567 break;
4568
4569 case SIGN_EXTEND:
4570 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4571 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4572 known_x, known_mode, known_ret));
4573
4574 case TRUNCATE:
4575 /* For a smaller object, just ignore the high bits. */
4576 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4577 known_x, known_mode, known_ret);
4578 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4579 - bitwidth)));
4580
4581 case NOT:
4582 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4583 known_x, known_mode, known_ret);
4584
4585 case ROTATE: case ROTATERT:
4586 /* If we are rotating left by a number of bits less than the number
4587 of sign bit copies, we can just subtract that amount from the
4588 number. */
4589 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4590 && INTVAL (XEXP (x, 1)) >= 0
4591 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4592 {
4593 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4594 known_x, known_mode, known_ret);
4595 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4596 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4597 }
4598 break;
4599
4600 case NEG:
4601 /* In general, this subtracts one sign bit copy. But if the value
4602 is known to be positive, the number of sign bit copies is the
4603 same as that of the input. Finally, if the input has just one bit
4604 that might be nonzero, all the bits are copies of the sign bit. */
4605 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4606 known_x, known_mode, known_ret);
4607 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4608 return num0 > 1 ? num0 - 1 : 1;
4609
4610 nonzero = nonzero_bits (XEXP (x, 0), mode);
4611 if (nonzero == 1)
4612 return bitwidth;
4613
4614 if (num0 > 1
4615 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4616 num0--;
4617
4618 return num0;
4619
4620 case IOR: case AND: case XOR:
4621 case SMIN: case SMAX: case UMIN: case UMAX:
4622 /* Logical operations will preserve the number of sign-bit copies.
4623 MIN and MAX operations always return one of the operands. */
4624 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4625 known_x, known_mode, known_ret);
4626 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4627 known_x, known_mode, known_ret);
4628 return MIN (num0, num1);
4629
4630 case PLUS: case MINUS:
4631 /* For addition and subtraction, we can have a 1-bit carry. However,
4632 if we are subtracting 1 from a positive number, there will not
4633 be such a carry. Furthermore, if the positive number is known to
4634 be 0 or 1, we know the result is either -1 or 0. */
4635
4636 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4637 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4638 {
4639 nonzero = nonzero_bits (XEXP (x, 0), mode);
4640 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4641 return (nonzero == 1 || nonzero == 0 ? bitwidth
4642 : bitwidth - floor_log2 (nonzero) - 1);
4643 }
4644
4645 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4646 known_x, known_mode, known_ret);
4647 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4648 known_x, known_mode, known_ret);
4649 result = MAX (1, MIN (num0, num1) - 1);
4650
4651#ifdef POINTERS_EXTEND_UNSIGNED
4652 /* If pointers extend signed and this is an addition or subtraction
4653 to a pointer in Pmode, all the bits above ptr_mode are known to be
4654 sign bit copies. */
4655 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4656 && (code == PLUS || code == MINUS)
f8cfc6aa 4657 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4658 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4659 - GET_MODE_BITSIZE (ptr_mode) + 1),
4660 result);
4661#endif
4662 return result;
4663
4664 case MULT:
4665 /* The number of bits of the product is the sum of the number of
4666 bits of both terms. However, unless one of the terms is known
4667 to be positive, we must allow for an additional bit since negating
4668 a negative number can remove one sign bit copy. */
4669
4670 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4671 known_x, known_mode, known_ret);
4672 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4673 known_x, known_mode, known_ret);
4674
4675 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4676 if (result > 0
4677 && (bitwidth > HOST_BITS_PER_WIDE_INT
4678 || (((nonzero_bits (XEXP (x, 0), mode)
4679 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4680 && ((nonzero_bits (XEXP (x, 1), mode)
4681 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4682 result--;
4683
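      /* E.g. with bitwidth 32, num0 = 20 and num1 = 24 the product needs
	 (32 - 20) + (32 - 24) = 20 value bits, leaving RESULT = 12, less
	 one more if both operands might be negative.  */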
4684 return MAX (1, result);
4685
4686 case UDIV:
4687 /* The result must be <= the first operand. If the first operand
4688 has the high bit set, we know nothing about the number of sign
4689 bit copies. */
4690 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4691 return 1;
4692 else if ((nonzero_bits (XEXP (x, 0), mode)
4693 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4694 return 1;
4695 else
4696 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4697 known_x, known_mode, known_ret);
4698
4699 case UMOD:
4700 /* The result must be <= the second operand. */
4701 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4702 known_x, known_mode, known_ret);
4703
4704 case DIV:
4705 /* Similar to unsigned division, except that we have to worry about
4706 the case where the divisor is negative, in which case we have
4707 to add 1. */
4708 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4709 known_x, known_mode, known_ret);
4710 if (result > 1
4711 && (bitwidth > HOST_BITS_PER_WIDE_INT
4712 || (nonzero_bits (XEXP (x, 1), mode)
4713 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4714 result--;
4715
4716 return result;
4717
4718 case MOD:
4719 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4720 known_x, known_mode, known_ret);
4721 if (result > 1
4722 && (bitwidth > HOST_BITS_PER_WIDE_INT
4723 || (nonzero_bits (XEXP (x, 1), mode)
4724 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4725 result--;
4726
4727 return result;
4728
4729 case ASHIFTRT:
4730 /* Shifts by a constant add to the number of bits equal to the
4731 sign bit. */
4732 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4733 known_x, known_mode, known_ret);
4734 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4735 && INTVAL (XEXP (x, 1)) > 0)
4736 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4737
4738 return num0;
4739
4740 case ASHIFT:
4741 /* Left shifts destroy copies. */
4742 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4743 || INTVAL (XEXP (x, 1)) < 0
4744 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4745 return 1;
4746
4747 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4748 known_x, known_mode, known_ret);
4749 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4750
4751 case IF_THEN_ELSE:
4752 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4753 known_x, known_mode, known_ret);
4754 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4755 known_x, known_mode, known_ret);
4756 return MIN (num0, num1);
4757
4758 case EQ: case NE: case GE: case GT: case LE: case LT:
4759 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4760 case GEU: case GTU: case LEU: case LTU:
4761 case UNORDERED: case ORDERED:
4762 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4763 Then see how many zero bits we have. */
4764 nonzero = STORE_FLAG_VALUE;
4765 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4766 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4767 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4768
4769 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4770
4771 default:
4772 break;
4773 }
4774
4775 /* If we haven't been able to figure it out by one of the above rules,
4776 see if some of the high-order bits are known to be zero. If so,
4777 count those bits and return one less than that amount. If the mode
4778 is too wide for us to compute the mask safely, return 1. */
4779
4780 bitwidth = GET_MODE_BITSIZE (mode);
4781 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4782 return 1;
4783
4784 nonzero = nonzero_bits (x, mode);
4785 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4786 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4787}
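
/* Companion sketch to the shift cases above, again outside GCC with
   invented toy_* names: arithmetic right shifts by a constant gain
   sign-bit copies, left shifts lose them, never dropping below 1 or
   exceeding the bit width.  */

#include <stdio.h>

static int
toy_copies_ashiftrt (int copies, int count, int bitwidth)
{
  int n = copies + count;
  return n < bitwidth ? n : bitwidth;
}

static int
toy_copies_ashift (int copies, int count)
{
  int n = copies - count;
  return n > 1 ? n : 1;
}

int
main (void)
{
  /* A 32-bit value with 5 known sign-bit copies: >> 8 gives 13 copies,
     << 8 leaves only the guaranteed minimum of 1.  */
  printf ("%d %d\n",
	  toy_copies_ashiftrt (5, 8, 32),
	  toy_copies_ashift (5, 8));
  return 0;
}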