/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "vecprim.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function
   in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
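/* Editorial note (not in the original source): because every CONST_INT in
   this range is pre-allocated and larger ones are interned in
   const_int_htab, equal CONST_INTs are pointer-identical.  A minimal
   sketch:

     rtx a = GEN_INT (0);          // the shared const0_rtx object
     rtx b = GEN_INT (100000);     // interned via gen_rtx_CONST_INT
     gcc_assert (a == const0_rtx && b == GEN_INT (100000));  */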
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}
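/* Editorial sketch (assumes the internal helpers above): attribute blocks
   are interned, so MEMs with equal attributes share one mem_attrs record
   and pointer comparison of MEM_ATTRS is meaningful:

     mem_attrs attrs = *mode_mem_attrs[(int) GET_MODE (mem)];
     attrs.align = 64;             // request stricter alignment
     set_mem_attrs (mem, &attrs);  // hashed; reuses an equal block if any  */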
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
   into the hash table if one identical to it is not already in the
   table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
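/* Editorial usage note: backends typically emit this as a scheduling
   barrier around prologue/epilogue sequences, e.g.

     emit_insn (gen_blockage ());

   so that instructions are not moved across the barrier.  */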
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
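/* Editorial note: gen_int_mode truncates C to MODE before interning, so
   an out-of-range constant collapses to its canonical form:

     rtx x = gen_int_mode (0xff, QImode);  // truncated/sign-extended to -1
     gcc_assert (x == constm1_rtx);        // the shared (const_int -1)  */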
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
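/* Editorial sketch of the double_int round trip provided by
   rtx_to_double_int and immed_double_int_const:

     double_int d = rtx_to_double_int (GEN_INT (42));
     rtx x = immed_double_int_const (d, SImode);  // back to (const_int 42)

   assuming SImode is no wider than HOST_WIDE_INT, so case 1 of
   immed_double_const below applies and a CONST_INT is returned.  */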
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
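/* Editorial examples of the three cases above, assuming a 64-bit
   HOST_WIDE_INT and 32-bit SImode:

     immed_double_const (5, 0, SImode);      // case 1: gen_int_mode -> (const_int 5)
     immed_double_const (-2, -1, VOIDmode);  // case 2: i1 is i0's sign word -> (const_int -2)
     immed_double_const (0, 1, VOIDmode);    // case 3: a VOIDmode CONST_DOUBLE  */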
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
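/* Editorial examples (on a target whose word_mode is SImode), for a
   DFmode pseudo REG:

     validate_subreg (SImode, DFmode, reg, 0);  // true: word_mode escape hatch
     validate_subreg (SImode, DFmode, reg, 2);  // false: offset not a multiple
                                                //        of the outer size  */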
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}


/* Create an rtvec and store within it the RTXen passed as arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
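/* Editorial note: byte_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a big-endian one; in the paradoxical
   direction, byte_lowpart_offset (DImode, SImode) yields the negated
   value (0 or -4), matching the comment above.  */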
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
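/* Editorial note: with generating_concat_p set, a complex-mode request
   yields a CONCAT of two independent pseudos, e.g.

     rtx c = gen_reg_rtx (DCmode);  // (concat:DC (reg:DF ...) (reg:DF ...))

   so the real and imaginary parts can be allocated separately.  */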
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_P (x) || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP in case EXP can be a
   VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes to get the OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
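/* Editorial note: for SImode within DImode on a 4-byte-word target,
   subreg_lowpart_offset (SImode, DImode) is 0 and
   subreg_highpart_offset (SImode, DImode) is 4 on a little-endian
   target; the two values swap on a big-endian target.  */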
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
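/* Editorial note: (subreg:DI (reg:SI) 0) is paradoxical -- the outer mode
   is wider than the inner register -- so paradoxical_subreg_p returns
   true for it, and false for the ordinary lowpart (subreg:SI (reg:DI) 0).  */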
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
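/* Editorial examples (4-byte words), for a DImode pseudo REG:

     operand_subword (reg, 0, 1, DImode);  // low-order word when
                                           // !WORDS_BIG_ENDIAN
     operand_subword (reg, 2, 1, DImode);  // outside OP -> const0_rtx  */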
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
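/* Editorial note: for a MEM with a sufficiently aligned DECL as its
   MEM_EXPR, the result is the known offset reduced modulo ALIGN in bytes;
   e.g. ALIGN == 64 with a known offset of 12 gives 12 & 7 == 4, while -1
   is returned whenever the object's alignment cannot be proven.  */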
6926c713 1558/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1559 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1560 if we are making a new object of this type. BITPOS is nonzero if
1561 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1562
1563void
502b8322
AJ
1564set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1565 HOST_WIDE_INT bitpos)
173b24b9 1566{
6f1087be 1567 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1568 tree type;
f12144dd 1569 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1570 addr_space_t as;
173b24b9
RK
1571
1572 /* It can happen that type_for_mode was given a mode for which there
1573 is no language-level type. In which case it returns NULL, which
1574 we can see here. */
1575 if (t == NULL_TREE)
1576 return;
1577
1578 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1579 if (type == error_mark_node)
1580 return;
173b24b9 1581
173b24b9
RK
1582 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1583 wrong answer, as it assumes that DECL_RTL already has the right alias
1584 info. Callers should not set DECL_RTL until after the call to
1585 set_mem_attributes. */
5b0264cb 1586 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1587
f12144dd
RS
1588 memset (&attrs, 0, sizeof (attrs));
1589
738cc472 1590 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1591 front-end routine) and use it. */
f12144dd 1592 attrs.alias = get_alias_set (t);
173b24b9 1593
a5e9c810 1594 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1595 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1596
268f7033 1597 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1598 refattrs = MEM_ATTRS (ref);
1599 if (refattrs)
268f7033
UW
1600 {
1601 /* ??? Can this ever happen? Calling this routine on a MEM that
1602 already carries memory attributes should probably be invalid. */
f12144dd 1603 attrs.expr = refattrs->expr;
754c3d5d 1604 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1605 attrs.offset = refattrs->offset;
754c3d5d 1606 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1607 attrs.size = refattrs->size;
1608 attrs.align = refattrs->align;
268f7033
UW
1609 }
1610
1611 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1612 else
268f7033 1613 {
f12144dd
RS
1614 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1615 gcc_assert (!defattrs->expr);
754c3d5d 1616 gcc_assert (!defattrs->offset_known_p);
f12144dd 1617
268f7033 1618 /* Respect mode size. */
754c3d5d 1619 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1620 attrs.size = defattrs->size;
268f7033
UW
1621 /* ??? Is this really necessary? We probably should always get
1622 the size from the type below. */
1623
1624 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1625 if T is an object, always compute the object alignment below. */
f12144dd
RS
1626 if (TYPE_P (t))
1627 attrs.align = defattrs->align;
1628 else
1629 attrs.align = BITS_PER_UNIT;
268f7033
UW
1630 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1631 e.g. if the type carries an alignment attribute. Should we be
1632 able to simply always use TYPE_ALIGN? */
1633 }
1634
c3d32120
RK
1635 /* We can set the alignment from the type if we are making an object,
1636 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
a80903ff 1637 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1638 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1639
70f34814
RG
1640 else if (TREE_CODE (t) == MEM_REF)
1641 {
a80903ff 1642 tree op0 = TREE_OPERAND (t, 0);
3e32c761
RG
1643 if (TREE_CODE (op0) == ADDR_EXPR
1644 && (DECL_P (TREE_OPERAND (op0, 0))
1645 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
70f34814 1646 {
3e32c761 1647 if (DECL_P (TREE_OPERAND (op0, 0)))
f12144dd 1648 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
3e32c761
RG
1649 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1650 {
f12144dd 1651 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
70f34814 1652#ifdef CONSTANT_ALIGNMENT
f12144dd
RS
1653 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1654 attrs.align);
70f34814 1655#endif
3e32c761
RG
1656 }
1657 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1658 {
1659 unsigned HOST_WIDE_INT ioff
1660 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1661 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd 1662 attrs.align = MIN (aoff, attrs.align);
3e32c761 1663 }
70f34814
RG
1664 }
1665 else
5951297a
EB
1666 /* ??? This isn't fully correct; we can't set the alignment from the
1667 type in all cases. */
f12144dd 1668 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
70f34814 1669 }
a80903ff 1670
9407f6bc
RG
1671 else if (TREE_CODE (t) == TARGET_MEM_REF)
1672 /* ??? This isn't fully correct; we can't set the alignment from the
1673 type in all cases. */
f12144dd 1674 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
9407f6bc 1675
738cc472
RK
1676 /* If the size is known, we can set that. */
1677 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
754c3d5d
RS
1678 {
1679 attrs.size_known_p = true;
1680 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1681 }
738cc472 1682
80965c18
RK
1683 /* If T is not a type, we may be able to deduce some more information about
1684 the expression. */
1685 if (! TYPE_P (t))
8ac61af7 1686 {
8476af98 1687 tree base;
df96b059 1688 bool align_computed = false;
389fdba0 1689
8ac61af7
RK
1690 if (TREE_THIS_VOLATILE (t))
1691 MEM_VOLATILE_P (ref) = 1;
173b24b9 1692
c56e3582
RK
1693 /* Now remove any conversions: they don't change what the underlying
1694 object is. Likewise for SAVE_EXPR. */
1043771b 1695 while (CONVERT_EXPR_P (t)
c56e3582
RK
1696 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1697 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1698 t = TREE_OPERAND (t, 0);
1699
4994da65
RG
1700 /* Note whether this expression can trap. */
1701 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1702
1703 base = get_base_address (t);
f18a7b25
MJ
1704 if (base)
1705 {
1706 if (DECL_P (base)
1707 && TREE_READONLY (base)
1708 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1709 && !TREE_THIS_VOLATILE (base))
1710 MEM_READONLY_P (ref) = 1;
1711
1712 /* Mark static const strings readonly as well. */
1713 if (TREE_CODE (base) == STRING_CST
1714 && TREE_READONLY (base)
1715 && TREE_STATIC (base))
1716 MEM_READONLY_P (ref) = 1;
1717
1718 if (TREE_CODE (base) == MEM_REF
1719 || TREE_CODE (base) == TARGET_MEM_REF)
1720 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1721 0))));
1722 else
1723 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1724 }
1725 else
1726 as = TYPE_ADDR_SPACE (type);
ba30e50d 1727
2039d7aa
RH
1728 /* If this expression uses its parent's alias set, mark it such
1729 that we won't change it. */
1730 if (component_uses_parent_alias_set (t))
10b76d73
RK
1731 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1732
8ac61af7
RK
1733 /* If this is a decl, set the attributes of the MEM from it. */
1734 if (DECL_P (t))
1735 {
f12144dd 1736 attrs.expr = t;
754c3d5d
RS
1737 attrs.offset_known_p = true;
1738 attrs.offset = 0;
6f1087be 1739 apply_bitpos = bitpos;
754c3d5d
RS
1740 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1741 {
1742 attrs.size_known_p = true;
1743 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1744 }
1745 else
1746 attrs.size_known_p = false;
f12144dd 1747 attrs.align = DECL_ALIGN (t);
df96b059 1748 align_computed = true;
8ac61af7
RK
1749 }
1750
40c0668b 1751 /* If this is a constant, we know the alignment. */
6615c446 1752 else if (CONSTANT_CLASS_P (t))
9ddfb1a7 1753 {
f12144dd 1754 attrs.align = TYPE_ALIGN (type);
9ddfb1a7 1755#ifdef CONSTANT_ALIGNMENT
f12144dd 1756 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
9ddfb1a7 1757#endif
df96b059 1758 align_computed = true;
9ddfb1a7 1759 }
998d7deb
RH
1760
1761 /* If this is a field reference and not a bit-field, record it. */
fa10beec 1762 /* ??? There is some information that can be gleaned from bit-fields,
998d7deb
RH
1763 such as the word offset in the structure that might be modified.
1764 But skip it for now. */
1765 else if (TREE_CODE (t) == COMPONENT_REF
1766 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1767 {
f12144dd 1768 attrs.expr = t;
754c3d5d
RS
1769 attrs.offset_known_p = true;
1770 attrs.offset = 0;
6f1087be 1771 apply_bitpos = bitpos;
998d7deb
RH
1772 /* ??? Any reason the field size would be different than
1773 the size we got from the type? */
1774 }
1775
1776 /* If this is an array reference, look for an outer field reference. */
1777 else if (TREE_CODE (t) == ARRAY_REF)
1778 {
1779 tree off_tree = size_zero_node;
1b1838b6
JW
1780 /* We can't modify t, because we use it at the end of the
1781 function. */
1782 tree t2 = t;
998d7deb
RH
1783
1784 do
1785 {
1b1838b6 1786 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1787 tree low_bound = array_ref_low_bound (t2);
1788 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1789
1790 /* We assume all arrays have sizes that are a multiple of a byte.
1791 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1792 index, then convert to sizetype and multiply by the size of
1793 the array element. */
1794 if (! integer_zerop (low_bound))
4845b383
KH
1795 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1796 index, low_bound);
2567406a 1797
44de5aeb 1798 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1799 size_binop (MULT_EXPR,
1800 fold_convert (sizetype,
1801 index),
44de5aeb
RK
1802 unit_size),
1803 off_tree);
1b1838b6 1804 t2 = TREE_OPERAND (t2, 0);
998d7deb 1805 }
1b1838b6 1806 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1807
1b1838b6 1808 if (DECL_P (t2))
c67a1cf6 1809 {
f12144dd 1810 attrs.expr = t2;
754c3d5d 1811 attrs.offset_known_p = false;
c67a1cf6 1812 if (host_integerp (off_tree, 1))
40cb04f1
RH
1813 {
1814 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1815 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd
RS
1816 attrs.align = DECL_ALIGN (t2);
1817 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1818 attrs.align = aoff;
df96b059 1819 align_computed = true;
754c3d5d
RS
1820 attrs.offset_known_p = true;
1821 attrs.offset = ioff;
6f1087be 1822 apply_bitpos = bitpos;
40cb04f1 1823 }
c67a1cf6 1824 }
1b1838b6 1825 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1826 {
f12144dd 1827 attrs.expr = t2;
754c3d5d 1828 attrs.offset_known_p = false;
998d7deb 1829 if (host_integerp (off_tree, 1))
6f1087be 1830 {
754c3d5d
RS
1831 attrs.offset_known_p = true;
1832 attrs.offset = tree_low_cst (off_tree, 1);
6f1087be
RH
1833 apply_bitpos = bitpos;
1834 }
998d7deb
RH
1835 /* ??? Any reason the field size would be different than
1836 the size we got from the type? */
1837 }
c67a1cf6
RH
1838 }
1839
56c47f22 1840 /* If this is an indirect reference, record it. */
70f34814 1841 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1842 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1843 {
f12144dd 1844 attrs.expr = t;
754c3d5d
RS
1845 attrs.offset_known_p = true;
1846 attrs.offset = 0;
56c47f22
RG
1847 apply_bitpos = bitpos;
1848 }
1849
0eb77834 1850 if (!align_computed)
df96b059 1851 {
0eb77834 1852 unsigned int obj_align = get_object_alignment (t);
f12144dd 1853 attrs.align = MAX (attrs.align, obj_align);
df96b059 1854 }
8ac61af7 1855 }
f18a7b25
MJ
1856 else
1857 as = TYPE_ADDR_SPACE (type);
8ac61af7 1858
15c812e3 1859 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1860 bit position offset. Similarly, increase the size of the accessed
1861 object to contain the negative offset. */
6f1087be 1862 if (apply_bitpos)
8c317c5f 1863 {
754c3d5d
RS
1864 gcc_assert (attrs.offset_known_p);
1865 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1866 if (attrs.size_known_p)
1867 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1868 }
6f1087be 1869
8ac61af7 1870 /* Now set the attributes we computed above. */
f18a7b25 1871 attrs.addrspace = as;
f12144dd 1872 set_mem_attrs (ref, &attrs);
173b24b9
RK
1873}
1874
6f1087be 1875void
502b8322 1876set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1877{
1878 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1879}
1880
173b24b9
RK
1881/* Set the alias set of MEM to SET. */
1882
1883void
4862826d 1884set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1885{
f12144dd
RS
1886 struct mem_attrs attrs;
1887
173b24b9 1888 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1889 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
1890 attrs = *get_mem_attrs (mem);
1891 attrs.alias = set;
1892 set_mem_attrs (mem, &attrs);
09e881c9
BE
1893}
1894
1895/* Set the address space of MEM to ADDRSPACE (target-defined). */
1896
1897void
1898set_mem_addr_space (rtx mem, addr_space_t addrspace)
1899{
f12144dd
RS
1900 struct mem_attrs attrs;
1901
1902 attrs = *get_mem_attrs (mem);
1903 attrs.addrspace = addrspace;
1904 set_mem_attrs (mem, &attrs);
173b24b9 1905}
738cc472 1906
d022d93e 1907/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1908
1909void
502b8322 1910set_mem_align (rtx mem, unsigned int align)
738cc472 1911{
f12144dd
RS
1912 struct mem_attrs attrs;
1913
1914 attrs = *get_mem_attrs (mem);
1915 attrs.align = align;
1916 set_mem_attrs (mem, &attrs);
738cc472 1917}
1285011e 1918
998d7deb 1919/* Set the expr for MEM to EXPR. */
1285011e
RK
1920
1921void
502b8322 1922set_mem_expr (rtx mem, tree expr)
1285011e 1923{
f12144dd
RS
1924 struct mem_attrs attrs;
1925
1926 attrs = *get_mem_attrs (mem);
1927 attrs.expr = expr;
1928 set_mem_attrs (mem, &attrs);
1285011e 1929}
998d7deb
RH
1930
1931/* Set the offset of MEM to OFFSET. */
1932
1933void
527210c4 1934set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 1935{
f12144dd
RS
1936 struct mem_attrs attrs;
1937
1938 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1939 attrs.offset_known_p = true;
1940 attrs.offset = offset;
527210c4
RS
1941 set_mem_attrs (mem, &attrs);
1942}
1943
1944/* Clear the offset of MEM. */
1945
1946void
1947clear_mem_offset (rtx mem)
1948{
1949 struct mem_attrs attrs;
1950
1951 attrs = *get_mem_attrs (mem);
754c3d5d 1952 attrs.offset_known_p = false;
f12144dd 1953 set_mem_attrs (mem, &attrs);
35aff10b
AM
1954}
1955
1956/* Set the size of MEM to SIZE. */
1957
1958void
f5541398 1959set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 1960{
f12144dd
RS
1961 struct mem_attrs attrs;
1962
1963 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1964 attrs.size_known_p = true;
1965 attrs.size = size;
f5541398
RS
1966 set_mem_attrs (mem, &attrs);
1967}
1968
1969/* Clear the size of MEM. */
1970
1971void
1972clear_mem_size (rtx mem)
1973{
1974 struct mem_attrs attrs;
1975
1976 attrs = *get_mem_attrs (mem);
754c3d5d 1977 attrs.size_known_p = false;
f12144dd 1978 set_mem_attrs (mem, &attrs);
998d7deb 1979}
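/* Editorial note: each setter above follows the same copy-modify-install
   idiom, because mem_attrs structures are uniquified in a hash table and
   shared between MEMs, so they must never be mutated in place:  */
#if 0
  struct mem_attrs attrs;
  attrs = *get_mem_attrs (mem);  /* copy the possibly shared attributes */
  attrs.align = 32;              /* change only the local copy */
  set_mem_attrs (mem, &attrs);   /* install; may reuse an existing block */
#endif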
173b24b9 1980\f
738cc472
RK
1981/* Return a memory reference like MEMREF, but with its mode changed to MODE
1982 and its address changed to ADDR. (VOIDmode means don't change the mode.
1983 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1984 returned memory location is required to be valid. The memory
1985 attributes are not changed. */
23b2ce53 1986
738cc472 1987static rtx
502b8322 1988change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1989{
09e881c9 1990 addr_space_t as;
60564289 1991 rtx new_rtx;
23b2ce53 1992
5b0264cb 1993 gcc_assert (MEM_P (memref));
09e881c9 1994 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
1995 if (mode == VOIDmode)
1996 mode = GET_MODE (memref);
1997 if (addr == 0)
1998 addr = XEXP (memref, 0);
a74ff877 1999 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2000 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2001 return memref;
23b2ce53 2002
f1ec5147 2003 if (validate)
23b2ce53 2004 {
f1ec5147 2005 if (reload_in_progress || reload_completed)
09e881c9 2006 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2007 else
09e881c9 2008 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2009 }
750c9258 2010
9b04c6a8
RK
2011 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2012 return memref;
2013
60564289
KG
2014 new_rtx = gen_rtx_MEM (mode, addr);
2015 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2016 return new_rtx;
23b2ce53 2017}
792760b9 2018
738cc472
RK
2019/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2020 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2021
2022rtx
502b8322 2023change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 2024{
f12144dd 2025 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
60564289 2026 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2027 struct mem_attrs attrs, *defattrs;
4e44c1ef 2028
f12144dd
RS
2029 attrs = *get_mem_attrs (memref);
2030 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2031 attrs.expr = NULL_TREE;
2032 attrs.offset_known_p = false;
2033 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2034 attrs.size = defattrs->size;
2035 attrs.align = defattrs->align;
c2f7bcc3 2036
fdb1c7b3 2037 /* If there are no changes, just return the original memory reference. */
60564289 2038 if (new_rtx == memref)
4e44c1ef 2039 {
f12144dd 2040 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2041 return new_rtx;
4e44c1ef 2042
60564289
KG
2043 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2044 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2045 }
fdb1c7b3 2046
f12144dd 2047 set_mem_attrs (new_rtx, &attrs);
60564289 2048 return new_rtx;
f4ef873c 2049}
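/* Editorial sketch (BYTE_MEM and WORD_ADDR are hypothetical): reusing a
   MEM's location under a different mode with a precomputed address; every
   attribute except the alias set is rebuilt from the new mode's defaults.  */
#if 0
  rtx wider = change_address (byte_mem, SImode, word_addr);
#endif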
792760b9 2050
738cc472
RK
2051/* Return a memory reference like MEMREF, but with its mode changed
2052 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2053 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2054 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2055 and the caller is responsible for adjusting MEMREF base register.
2056 If ADJUST_OBJECT is zero, the underlying object associated with the
2057 memory reference is left unchanged and the caller is responsible for
2058 dealing with it. Otherwise, if the new memory reference is outside
2059 the underlying object, even partially, then the object is dropped. */
f1ec5147
RK
2060
2061rtx
502b8322 2062adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
5ef0b50d 2063 int validate, int adjust_address, int adjust_object)
f1ec5147 2064{
823e3574 2065 rtx addr = XEXP (memref, 0);
60564289 2066 rtx new_rtx;
f12144dd 2067 enum machine_mode address_mode;
a6fe9ed4 2068 int pbits;
f12144dd
RS
2069 struct mem_attrs attrs, *defattrs;
2070 unsigned HOST_WIDE_INT max_align;
2071
2072 attrs = *get_mem_attrs (memref);
823e3574 2073
fdb1c7b3
JH
2074 /* If there are no changes, just return the original memory reference. */
2075 if (mode == GET_MODE (memref) && !offset
f12144dd
RS
2076 && (!validate || memory_address_addr_space_p (mode, addr,
2077 attrs.addrspace)))
fdb1c7b3
JH
2078 return memref;
2079
d14419e4 2080 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2081 This may happen even if offset is nonzero -- consider
d14419e4
RH
2082 (plus (plus reg reg) const_int) -- so do this always. */
2083 addr = copy_rtx (addr);
2084
a6fe9ed4
JM
2085 /* Convert a possibly large offset to a signed value within the
2086 range of the target address space. */
372d6395 2087 address_mode = get_address_mode (memref);
d4ebfa65 2088 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2089 if (HOST_BITS_PER_WIDE_INT > pbits)
2090 {
2091 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2092 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2093 >> shift);
2094 }
2095
5ef0b50d 2096 if (adjust_address)
4a78c787
RH
2097 {
2098 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2099 object, we can merge it into the LO_SUM. */
2100 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2101 && offset >= 0
2102 && (unsigned HOST_WIDE_INT) offset
2103 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2104 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2105 plus_constant (address_mode,
2106 XEXP (addr, 1), offset));
4a78c787 2107 else
0a81f074 2108 addr = plus_constant (address_mode, addr, offset);
4a78c787 2109 }
823e3574 2110
60564289 2111 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472 2112
09efeca1
PB
2113 /* If the address is a REG, change_address_1 rightfully returns memref,
2114 but this would destroy memref's MEM_ATTRS. */
2115 if (new_rtx == memref && offset != 0)
2116 new_rtx = copy_rtx (new_rtx);
2117
5ef0b50d
EB
2118 /* Conservatively drop the object if we don't know where we start from. */
2119 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2120 {
2121 attrs.expr = NULL_TREE;
2122 attrs.alias = 0;
2123 }
2124
738cc472
RK
2125 /* Compute the new values of the memory attributes due to this adjustment.
2126 We add the offsets and update the alignment. */
754c3d5d 2127 if (attrs.offset_known_p)
5ef0b50d
EB
2128 {
2129 attrs.offset += offset;
2130
2131 /* Drop the object if the new left end is not within its bounds. */
2132 if (adjust_object && attrs.offset < 0)
2133 {
2134 attrs.expr = NULL_TREE;
2135 attrs.alias = 0;
2136 }
2137 }
738cc472 2138
03bf2c23
RK
2139 /* Compute the new alignment by taking the MIN of the alignment and the
2140 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2141 is zero. */
2142 if (offset != 0)
f12144dd
RS
2143 {
2144 max_align = (offset & -offset) * BITS_PER_UNIT;
2145 attrs.align = MIN (attrs.align, max_align);
2146 }
738cc472 2147
10b76d73 2148 /* We can compute the size in a number of ways. */
f12144dd 2149 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
754c3d5d
RS
2150 if (defattrs->size_known_p)
2151 {
5ef0b50d
EB
2152 /* Drop the object if the new right end is not within its bounds. */
2153 if (adjust_object && (offset + defattrs->size) > attrs.size)
2154 {
2155 attrs.expr = NULL_TREE;
2156 attrs.alias = 0;
2157 }
754c3d5d
RS
2158 attrs.size_known_p = true;
2159 attrs.size = defattrs->size;
2160 }
2161 else if (attrs.size_known_p)
5ef0b50d
EB
2162 {
2163 attrs.size -= offset;
2164 /* ??? The store_by_pieces machinery generates negative sizes. */
2165 gcc_assert (!(adjust_object && attrs.size < 0));
2166 }
10b76d73 2167
f12144dd 2168 set_mem_attrs (new_rtx, &attrs);
738cc472 2169
60564289 2170 return new_rtx;
f1ec5147
RK
2171}
2172
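/* Editorial sketch: most callers go through the adjust_address convenience
   macro (defined elsewhere), which at the time of writing wraps this
   function with VALIDATE = 1, ADJUST_ADDRESS = 1, ADJUST_OBJECT = 0.
   Splitting a DImode MEM into word halves on a 32-bit target might read
   (DIMODE_MEM is hypothetical):  */
#if 0
  rtx lo = adjust_address (dimode_mem, SImode, 0);
  rtx hi = adjust_address (dimode_mem, SImode, GET_MODE_SIZE (SImode));
#endif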
630036c6
JJ
2173/* Return a memory reference like MEMREF, but with its mode changed
2174 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2175 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2176 nonzero, the memory address is forced to be valid. */
2177
2178rtx
502b8322
AJ
2179adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2180 HOST_WIDE_INT offset, int validate)
630036c6
JJ
2181{
2182 memref = change_address_1 (memref, VOIDmode, addr, validate);
5ef0b50d 2183 return adjust_address_1 (memref, mode, offset, validate, 0, 0);
630036c6
JJ
2184}
2185
8ac61af7
RK
2186/* Return a memory reference like MEMREF, but whose address is changed by
2187 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2188 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2189
2190rtx
502b8322 2191offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2192{
60564289 2193 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2194 enum machine_mode address_mode;
754c3d5d 2195 struct mem_attrs attrs, *defattrs;
e3c8ea67 2196
f12144dd 2197 attrs = *get_mem_attrs (memref);
372d6395 2198 address_mode = get_address_mode (memref);
d4ebfa65 2199 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2200
68252e27 2201 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2202 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2203
2204 However, if we did go and rearrange things, we can wind up not
2205 being able to recognize the magic around pic_offset_table_rtx.
2206 This stuff is fragile, and is yet another example of why it is
2207 bad to expose PIC machinery too early. */
f12144dd
RS
2208 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2209 attrs.addrspace)
e3c8ea67
RH
2210 && GET_CODE (addr) == PLUS
2211 && XEXP (addr, 0) == pic_offset_table_rtx)
2212 {
2213 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2214 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2215 }
2216
60564289
KG
2217 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2218 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2219
fdb1c7b3 2220 /* If there are no changes, just return the original memory reference. */
60564289
KG
2221 if (new_rtx == memref)
2222 return new_rtx;
fdb1c7b3 2223
0d4903b8
RK
2224 /* Update the alignment to reflect the offset. Reset the offset, which
2225 we don't know. */
754c3d5d
RS
2226 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2227 attrs.offset_known_p = false;
2228 attrs.size_known_p = defattrs->size_known_p;
2229 attrs.size = defattrs->size;
f12144dd
RS
2230 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2231 set_mem_attrs (new_rtx, &attrs);
60564289 2232 return new_rtx;
0d4903b8 2233}
68252e27 2234
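/* Editorial sketch (INDEX and ARRAY_MEM are hypothetical): indexing an
   array of 4-byte elements with a run-time index; POW2 = 4 lets the new
   MEM keep 32-bit alignment information.  */
#if 0
  rtx byte_off = expand_simple_binop (Pmode, ASHIFT, index, GEN_INT (2),
                                      NULL_RTX, 1, OPTAB_LIB_WIDEN);
  rtx elt = offset_address (array_mem, byte_off, 4);
#endif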
792760b9
RK
2235/* Return a memory reference like MEMREF, but with its address changed to
2236 ADDR. The caller is asserting that the actual piece of memory pointed
2237 to is the same, just the form of the address is being changed, such as
2238 by putting something into a register. */
2239
2240rtx
502b8322 2241replace_equiv_address (rtx memref, rtx addr)
792760b9 2242{
738cc472
RK
2243 /* change_address_1 copies the memory attribute structure without change
2244 and that's exactly what we want here. */
40c0668b 2245 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2246 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2247}
738cc472 2248
f1ec5147
RK
2249/* Likewise, but the reference is not required to be valid. */
2250
2251rtx
502b8322 2252replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2253{
f1ec5147
RK
2254 return change_address_1 (memref, VOIDmode, addr, 0);
2255}
e7dfe4bb
RH
2256
2257/* Return a memory reference like MEMREF, but with its mode widened to
2258 MODE and offset by OFFSET. This would be used by targets that e.g.
2259 cannot issue QImode memory operations and have to use SImode memory
2260 operations plus masking logic. */
2261
2262rtx
502b8322 2263widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2264{
5ef0b50d 2265 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0);
f12144dd 2266 struct mem_attrs attrs;
e7dfe4bb
RH
2267 unsigned int size = GET_MODE_SIZE (mode);
2268
fdb1c7b3 2269 /* If there are no changes, just return the original memory reference. */
60564289
KG
2270 if (new_rtx == memref)
2271 return new_rtx;
fdb1c7b3 2272
f12144dd
RS
2273 attrs = *get_mem_attrs (new_rtx);
2274
e7dfe4bb
RH
2275 /* If we don't know what offset we were at within the expression, then
2276 we can't know if we've overstepped the bounds. */
754c3d5d 2277 if (! attrs.offset_known_p)
f12144dd 2278 attrs.expr = NULL_TREE;
e7dfe4bb 2279
f12144dd 2280 while (attrs.expr)
e7dfe4bb 2281 {
f12144dd 2282 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2283 {
f12144dd
RS
2284 tree field = TREE_OPERAND (attrs.expr, 1);
2285 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2286
2287 if (! DECL_SIZE_UNIT (field))
2288 {
f12144dd 2289 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2290 break;
2291 }
2292
2293 /* Is the field at least as large as the access? If so, ok,
2294 otherwise strip back to the containing structure. */
03667700
RK
2295 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2296 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2297 && attrs.offset >= 0)
e7dfe4bb
RH
2298 break;
2299
44de5aeb 2300 if (! host_integerp (offset, 1))
e7dfe4bb 2301 {
f12144dd 2302 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2303 break;
2304 }
2305
f12144dd 2306 attrs.expr = TREE_OPERAND (attrs.expr, 0);
754c3d5d
RS
2307 attrs.offset += tree_low_cst (offset, 1);
2308 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2309 / BITS_PER_UNIT);
e7dfe4bb
RH
2310 }
2311 /* Similarly for the decl. */
f12144dd
RS
2312 else if (DECL_P (attrs.expr)
2313 && DECL_SIZE_UNIT (attrs.expr)
2314 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2315 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2316 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2317 break;
2318 else
2319 {
2320 /* The widened memory access overflows the expression, which means
2321 that it could alias another expression. Zap it. */
f12144dd 2322 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2323 break;
2324 }
2325 }
2326
f12144dd 2327 if (! attrs.expr)
754c3d5d 2328 attrs.offset_known_p = false;
e7dfe4bb
RH
2329
2330 /* The widened memory may alias other stuff, so zap the alias set. */
2331 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2332 attrs.alias = 0;
754c3d5d
RS
2333 attrs.size_known_p = true;
2334 attrs.size = size;
f12144dd 2335 set_mem_attrs (new_rtx, &attrs);
60564289 2336 return new_rtx;
e7dfe4bb 2337}
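/* Editorial sketch (BYTE_MEM is hypothetical): a target without byte
   memory operations can expand a QImode store as a word-sized
   read-modify-write on the containing word.  */
#if 0
  rtx word = widen_memory_access (byte_mem, SImode, 0);
  /* ... load WORD, insert the byte with masking logic, store WORD ... */
#endif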
23b2ce53 2338\f
f6129d66
RH
2339/* A fake decl that is used as the MEM_EXPR of spill slots. */
2340static GTY(()) tree spill_slot_decl;
2341
3d7e23f6
RH
2342tree
2343get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2344{
2345 tree d = spill_slot_decl;
2346 rtx rd;
f12144dd 2347 struct mem_attrs attrs;
f6129d66 2348
3d7e23f6 2349 if (d || !force_build_p)
f6129d66
RH
2350 return d;
2351
c2255bc4
AH
2352 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2353 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2354 DECL_ARTIFICIAL (d) = 1;
2355 DECL_IGNORED_P (d) = 1;
2356 TREE_USED (d) = 1;
f6129d66
RH
2357 spill_slot_decl = d;
2358
2359 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2360 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2361 attrs = *mode_mem_attrs[(int) BLKmode];
2362 attrs.alias = new_alias_set ();
2363 attrs.expr = d;
2364 set_mem_attrs (rd, &attrs);
f6129d66
RH
2365 SET_DECL_RTL (d, rd);
2366
2367 return d;
2368}
2369
2370/* Given MEM, a result from assign_stack_local, fill in the memory
2371 attributes as appropriate for a register allocator spill slot.
2372 These slots are not aliasable by other memory. We arrange for
2373 them all to use a single MEM_EXPR, so that the aliasing code can
2374 work properly in the case of shared spill slots. */
2375
2376void
2377set_mem_attrs_for_spill (rtx mem)
2378{
f12144dd
RS
2379 struct mem_attrs attrs;
2380 rtx addr;
f6129d66 2381
f12144dd
RS
2382 attrs = *get_mem_attrs (mem);
2383 attrs.expr = get_spill_slot_decl (true);
2384 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2385 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2386
2387 /* We expect the incoming memory to be of the form:
2388 (mem:MODE (plus (reg sfp) (const_int offset)))
2389 with perhaps the plus missing for offset = 0. */
2390 addr = XEXP (mem, 0);
754c3d5d
RS
2391 attrs.offset_known_p = true;
2392 attrs.offset = 0;
f6129d66 2393 if (GET_CODE (addr) == PLUS
481683e1 2394 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2395 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2396
f12144dd 2397 set_mem_attrs (mem, &attrs);
f6129d66
RH
2398 MEM_NOTRAP_P (mem) = 1;
2399}
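/* Editorial sketch: the register allocator applies this to each stack slot
   it hands out, so all spill MEMs share one MEM_EXPR and one alias set and
   therefore cannot alias user-visible memory.  */
#if 0
  rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  set_mem_attrs_for_spill (slot);
#endif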
2400\f
23b2ce53
RS
2401/* Return a newly created CODE_LABEL rtx with a unique label number. */
2402
2403rtx
502b8322 2404gen_label_rtx (void)
23b2ce53 2405{
0dc36574 2406 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2407 NULL, label_num++, NULL);
23b2ce53
RS
2408}
2409\f
2410/* For procedure integration. */
2411
23b2ce53 2412/* Install new pointers to the first and last insns in the chain.
86fe05e0 2413 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2414 Used for an inline-procedure after copying the insn chain. */
2415
2416void
502b8322 2417set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2418{
86fe05e0
RK
2419 rtx insn;
2420
5936d944
JH
2421 set_first_insn (first);
2422 set_last_insn (last);
86fe05e0
RK
2423 cur_insn_uid = 0;
2424
b5b8b0ac
AO
2425 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2426 {
2427 int debug_count = 0;
2428
2429 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2430 cur_debug_insn_uid = 0;
2431
2432 for (insn = first; insn; insn = NEXT_INSN (insn))
2433 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2434 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2435 else
2436 {
2437 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2438 if (DEBUG_INSN_P (insn))
2439 debug_count++;
2440 }
2441
2442 if (debug_count)
2443 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2444 else
2445 cur_debug_insn_uid++;
2446 }
2447 else
2448 for (insn = first; insn; insn = NEXT_INSN (insn))
2449 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2450
2451 cur_insn_uid++;
23b2ce53 2452}
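/* Editorial note: UIDs below MIN_NONDEBUG_INSN_UID are reserved for debug
   insns, which is why the code above maintains cur_insn_uid and
   cur_debug_insn_uid as separate counters drawing from disjoint ranges.  */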
23b2ce53 2453\f
750c9258 2454/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2455 structure. This routine should only be called once. */
23b2ce53 2456
fd743bc1 2457static void
b4aaa77b 2458unshare_all_rtl_1 (rtx insn)
23b2ce53 2459{
d1b81779 2460 /* Unshare just about everything else. */
2c07f13b 2461 unshare_all_rtl_in_chain (insn);
750c9258 2462
23b2ce53
RS
2463 /* Make sure the addresses of stack slots found outside the insn chain
2464 (such as, in DECL_RTL of a variable) are not shared
2465 with the insn chain.
2466
2467 This special care is necessary when the stack slot MEM does not
2468 actually appear in the insn chain. If it does appear, its address
2469 is unshared from all else at that point. */
242b0ce6 2470 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2471}
2472
750c9258 2473/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2474 structure, again. This is a fairly expensive thing to do so it
2475 should be done sparingly. */
2476
2477void
502b8322 2478unshare_all_rtl_again (rtx insn)
d1b81779
GK
2479{
2480 rtx p;
624c87aa
RE
2481 tree decl;
2482
d1b81779 2483 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2484 if (INSN_P (p))
d1b81779
GK
2485 {
2486 reset_used_flags (PATTERN (p));
2487 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2488 if (CALL_P (p))
2489 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2490 }
624c87aa 2491
2d4aecb3 2492 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2493 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2494
624c87aa 2495 /* Make sure that virtual parameters are not shared. */
910ad8de 2496 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2497 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2498
2499 reset_used_flags (stack_slot_list);
2500
b4aaa77b 2501 unshare_all_rtl_1 (insn);
fd743bc1
PB
2502}
2503
c2924966 2504unsigned int
fd743bc1
PB
2505unshare_all_rtl (void)
2506{
b4aaa77b 2507 unshare_all_rtl_1 (get_insns ());
c2924966 2508 return 0;
d1b81779
GK
2509}
2510
ef330312 2511
2c07f13b
JH
2512 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2513 Recursively does the same for subexpressions. */
2514
2515static void
2516verify_rtx_sharing (rtx orig, rtx insn)
2517{
2518 rtx x = orig;
2519 int i;
2520 enum rtx_code code;
2521 const char *format_ptr;
2522
2523 if (x == 0)
2524 return;
2525
2526 code = GET_CODE (x);
2527
2528 /* These types may be freely shared. */
2529
2530 switch (code)
2531 {
2532 case REG:
0ca5af51
AO
2533 case DEBUG_EXPR:
2534 case VALUE:
d8116890 2535 CASE_CONST_ANY:
2c07f13b
JH
2536 case SYMBOL_REF:
2537 case LABEL_REF:
2538 case CODE_LABEL:
2539 case PC:
2540 case CC0:
3810076b 2541 case RETURN:
26898771 2542 case SIMPLE_RETURN:
2c07f13b 2543 case SCRATCH:
2c07f13b 2544 return;
3e89ed8d
JH
2545 /* A CLOBBER of a hard register may be shared; other CLOBBERs may not be. */
2546 case CLOBBER:
2547 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2548 return;
2549 break;
2c07f13b
JH
2550
2551 case CONST:
6fb5fa3c 2552 if (shared_const_p (orig))
2c07f13b
JH
2553 return;
2554 break;
2555
2556 case MEM:
2557 /* A MEM is allowed to be shared if its address is constant. */
2558 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2559 || reload_completed || reload_in_progress)
2560 return;
2561
2562 break;
2563
2564 default:
2565 break;
2566 }
2567
2568 /* This rtx may not be shared. If it has already been seen,
2569 replace it with a copy of itself. */
1a2caa7a 2570#ifdef ENABLE_CHECKING
2c07f13b
JH
2571 if (RTX_FLAG (x, used))
2572 {
ab532386 2573 error ("invalid rtl sharing found in the insn");
2c07f13b 2574 debug_rtx (insn);
ab532386 2575 error ("shared rtx");
2c07f13b 2576 debug_rtx (x);
ab532386 2577 internal_error ("internal consistency failure");
2c07f13b 2578 }
1a2caa7a
NS
2579#endif
2580 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2581
2c07f13b
JH
2582 RTX_FLAG (x, used) = 1;
2583
6614fd40 2584 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2585
2586 format_ptr = GET_RTX_FORMAT (code);
2587
2588 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2589 {
2590 switch (*format_ptr++)
2591 {
2592 case 'e':
2593 verify_rtx_sharing (XEXP (x, i), insn);
2594 break;
2595
2596 case 'E':
2597 if (XVEC (x, i) != NULL)
2598 {
2599 int j;
2600 int len = XVECLEN (x, i);
2601
2602 for (j = 0; j < len; j++)
2603 {
1a2caa7a
NS
2604 /* We allow sharing of ASM_OPERANDS inside single
2605 instruction. */
2c07f13b 2606 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2607 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2608 == ASM_OPERANDS))
2c07f13b
JH
2609 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2610 else
2611 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2612 }
2613 }
2614 break;
2615 }
2616 }
2617 return;
2618}
2619
ba228239 2620/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2621 sharing between the subexpressions. */
2622
24e47c76 2623DEBUG_FUNCTION void
2c07f13b
JH
2624verify_rtl_sharing (void)
2625{
2626 rtx p;
2627
a222c01a
MM
2628 timevar_push (TV_VERIFY_RTL_SHARING);
2629
2c07f13b
JH
2630 for (p = get_insns (); p; p = NEXT_INSN (p))
2631 if (INSN_P (p))
2632 {
2633 reset_used_flags (PATTERN (p));
2634 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2635 if (CALL_P (p))
2636 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2954a813
KK
2637 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2638 {
2639 int i;
2640 rtx q, sequence = PATTERN (p);
2641
2642 for (i = 0; i < XVECLEN (sequence, 0); i++)
2643 {
2644 q = XVECEXP (sequence, 0, i);
2645 gcc_assert (INSN_P (q));
2646 reset_used_flags (PATTERN (q));
2647 reset_used_flags (REG_NOTES (q));
776bebcd
JJ
2648 if (CALL_P (q))
2649 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2954a813
KK
2650 }
2651 }
2c07f13b
JH
2652 }
2653
2654 for (p = get_insns (); p; p = NEXT_INSN (p))
2655 if (INSN_P (p))
2656 {
2657 verify_rtx_sharing (PATTERN (p), p);
2658 verify_rtx_sharing (REG_NOTES (p), p);
776bebcd
JJ
2659 if (CALL_P (p))
2660 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2c07f13b 2661 }
a222c01a
MM
2662
2663 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2664}
2665
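/* Editorial note: the two loops above are deliberately separate.  The
   first pass clears every `used' flag reachable from the insn stream
   (including inside SEQUENCEs); only then does the second pass mark and
   check, so an rtx reachable from two insns is caught when the second
   visit finds its flag already set.  */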
d1b81779
GK
2666/* Go through all the RTL insn bodies and copy any invalid shared structure.
2667 Assumes the mark bits are cleared at entry. */
2668
2c07f13b
JH
2669void
2670unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2671{
2672 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2673 if (INSN_P (insn))
d1b81779
GK
2674 {
2675 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2676 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2677 if (CALL_P (insn))
2678 CALL_INSN_FUNCTION_USAGE (insn)
2679 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2680 }
2681}
2682
2d4aecb3 2683/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2684 shared. We never replace the DECL_RTLs themselves with a copy,
2685 but expressions mentioned into a DECL_RTL cannot be shared with
2686 expressions in the instruction stream.
2687
2688 Note that reload may convert pseudo registers into memories in-place.
2689 Pseudo registers are always shared, but MEMs never are. Thus if we
2690 reset the used flags on MEMs in the instruction stream, we must set
2691 them again on MEMs that appear in DECL_RTLs. */
2692
2d4aecb3 2693static void
5eb2a9f2 2694set_used_decls (tree blk)
2d4aecb3
AO
2695{
2696 tree t;
2697
2698 /* Mark decls. */
910ad8de 2699 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2700 if (DECL_RTL_SET_P (t))
5eb2a9f2 2701 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2702
2703 /* Now process sub-blocks. */
87caf699 2704 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2705 set_used_decls (t);
2d4aecb3
AO
2706}
2707
23b2ce53 2708/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2709 Recursively does the same for subexpressions. Uses
2710 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2711
2712rtx
502b8322 2713copy_rtx_if_shared (rtx orig)
23b2ce53 2714{
32b32b16
AP
2715 copy_rtx_if_shared_1 (&orig);
2716 return orig;
2717}
2718
ff954f39
AP
2719/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2720 use. Recursively does the same for subexpressions. */
2721
32b32b16
AP
2722static void
2723copy_rtx_if_shared_1 (rtx *orig1)
2724{
2725 rtx x;
b3694847
SS
2726 int i;
2727 enum rtx_code code;
32b32b16 2728 rtx *last_ptr;
b3694847 2729 const char *format_ptr;
23b2ce53 2730 int copied = 0;
32b32b16
AP
2731 int length;
2732
2733 /* Repeat is used to turn tail-recursion into iteration. */
2734repeat:
2735 x = *orig1;
23b2ce53
RS
2736
2737 if (x == 0)
32b32b16 2738 return;
23b2ce53
RS
2739
2740 code = GET_CODE (x);
2741
2742 /* These types may be freely shared. */
2743
2744 switch (code)
2745 {
2746 case REG:
0ca5af51
AO
2747 case DEBUG_EXPR:
2748 case VALUE:
d8116890 2749 CASE_CONST_ANY:
23b2ce53 2750 case SYMBOL_REF:
2c07f13b 2751 case LABEL_REF:
23b2ce53
RS
2752 case CODE_LABEL:
2753 case PC:
2754 case CC0:
276e0224 2755 case RETURN:
26898771 2756 case SIMPLE_RETURN:
23b2ce53 2757 case SCRATCH:
0f41302f 2758 /* SCRATCH rtxs must be shared because they represent distinct values. */
32b32b16 2759 return;
3e89ed8d
JH
2760 case CLOBBER:
2761 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2762 return;
2763 break;
23b2ce53 2764
b851ea09 2765 case CONST:
6fb5fa3c 2766 if (shared_const_p (x))
32b32b16 2767 return;
b851ea09
RK
2768 break;
2769
b5b8b0ac 2770 case DEBUG_INSN:
23b2ce53
RS
2771 case INSN:
2772 case JUMP_INSN:
2773 case CALL_INSN:
2774 case NOTE:
23b2ce53
RS
2775 case BARRIER:
2776 /* The chain of insns is not being copied. */
32b32b16 2777 return;
23b2ce53 2778
e9a25f70
JL
2779 default:
2780 break;
23b2ce53
RS
2781 }
2782
2783 /* This rtx may not be shared. If it has already been seen,
2784 replace it with a copy of itself. */
2785
2adc7f12 2786 if (RTX_FLAG (x, used))
23b2ce53 2787 {
aacd3885 2788 x = shallow_copy_rtx (x);
23b2ce53
RS
2789 copied = 1;
2790 }
2adc7f12 2791 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2792
2793 /* Now scan the subexpressions recursively.
2794 We can store any replaced subexpressions directly into X
2795 since we know X is not shared! Any vectors in X
2796 must be copied if X was copied. */
2797
2798 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2799 length = GET_RTX_LENGTH (code);
2800 last_ptr = NULL;
b8698a0f 2801
32b32b16 2802 for (i = 0; i < length; i++)
23b2ce53
RS
2803 {
2804 switch (*format_ptr++)
2805 {
2806 case 'e':
32b32b16
AP
2807 if (last_ptr)
2808 copy_rtx_if_shared_1 (last_ptr);
2809 last_ptr = &XEXP (x, i);
23b2ce53
RS
2810 break;
2811
2812 case 'E':
2813 if (XVEC (x, i) != NULL)
2814 {
b3694847 2815 int j;
f0722107 2816 int len = XVECLEN (x, i);
b8698a0f 2817
6614fd40
KH
2818 /* Copy the vector iff I copied the rtx and the length
2819 is nonzero. */
f0722107 2820 if (copied && len > 0)
8f985ec4 2821 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2822
5d3cc252 2823 /* Call recursively on all inside the vector. */
f0722107 2824 for (j = 0; j < len; j++)
32b32b16
AP
2825 {
2826 if (last_ptr)
2827 copy_rtx_if_shared_1 (last_ptr);
2828 last_ptr = &XVECEXP (x, i, j);
2829 }
23b2ce53
RS
2830 }
2831 break;
2832 }
2833 }
32b32b16
AP
2834 *orig1 = x;
2835 if (last_ptr)
2836 {
2837 orig1 = last_ptr;
2838 goto repeat;
2839 }
2840 return;
23b2ce53
RS
2841}
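/* Editorial note: the last_ptr/repeat scheme above is manual tail-call
   elimination.  Instead of recursing on the final subexpression, the
   function loops, so C stack depth is bounded by the rtx's branching
   structure rather than by the length of long operand chains.  */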
2842
76369a82 2843/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 2844
76369a82
NF
2845static void
2846mark_used_flags (rtx x, int flag)
23b2ce53 2847{
b3694847
SS
2848 int i, j;
2849 enum rtx_code code;
2850 const char *format_ptr;
32b32b16 2851 int length;
23b2ce53 2852
32b32b16
AP
2853 /* Repeat is used to turn tail-recursion into iteration. */
2854repeat:
23b2ce53
RS
2855 if (x == 0)
2856 return;
2857
2858 code = GET_CODE (x);
2859
9faa82d8 2860 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2861 for them. */
2862
2863 switch (code)
2864 {
2865 case REG:
0ca5af51
AO
2866 case DEBUG_EXPR:
2867 case VALUE:
d8116890 2868 CASE_CONST_ANY:
23b2ce53
RS
2869 case SYMBOL_REF:
2870 case CODE_LABEL:
2871 case PC:
2872 case CC0:
276e0224 2873 case RETURN:
26898771 2874 case SIMPLE_RETURN:
23b2ce53
RS
2875 return;
2876
b5b8b0ac 2877 case DEBUG_INSN:
23b2ce53
RS
2878 case INSN:
2879 case JUMP_INSN:
2880 case CALL_INSN:
2881 case NOTE:
2882 case LABEL_REF:
2883 case BARRIER:
2884 /* The chain of insns is not being copied. */
2885 return;
750c9258 2886
e9a25f70
JL
2887 default:
2888 break;
23b2ce53
RS
2889 }
2890
76369a82 2891 RTX_FLAG (x, used) = flag;
23b2ce53
RS
2892
2893 format_ptr = GET_RTX_FORMAT (code);
32b32b16 2894 length = GET_RTX_LENGTH (code);
b8698a0f 2895
32b32b16 2896 for (i = 0; i < length; i++)
23b2ce53
RS
2897 {
2898 switch (*format_ptr++)
2899 {
2900 case 'e':
32b32b16
AP
2901 if (i == length-1)
2902 {
2903 x = XEXP (x, i);
2904 goto repeat;
2905 }
76369a82 2906 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
2907 break;
2908
2909 case 'E':
2910 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 2911 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
2912 break;
2913 }
2914 }
2915}
2c07f13b 2916
76369a82 2917/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
2918 to look for shared sub-parts. */
2919
2920void
76369a82 2921reset_used_flags (rtx x)
2c07f13b 2922{
76369a82
NF
2923 mark_used_flags (x, 0);
2924}
2c07f13b 2925
76369a82
NF
2926/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2927 to look for shared sub-parts. */
2c07f13b 2928
76369a82
NF
2929void
2930set_used_flags (rtx x)
2931{
2932 mark_used_flags (x, 1);
2c07f13b 2933}
23b2ce53
RS
2934\f
2935/* Copy X if necessary so that it won't be altered by changes in OTHER.
2936 Return X or the rtx for the pseudo reg the value of X was copied into.
2937 OTHER must be valid as a SET_DEST. */
2938
2939rtx
502b8322 2940make_safe_from (rtx x, rtx other)
23b2ce53
RS
2941{
2942 while (1)
2943 switch (GET_CODE (other))
2944 {
2945 case SUBREG:
2946 other = SUBREG_REG (other);
2947 break;
2948 case STRICT_LOW_PART:
2949 case SIGN_EXTEND:
2950 case ZERO_EXTEND:
2951 other = XEXP (other, 0);
2952 break;
2953 default:
2954 goto done;
2955 }
2956 done:
3c0cb5de 2957 if ((MEM_P (other)
23b2ce53 2958 && ! CONSTANT_P (x)
f8cfc6aa 2959 && !REG_P (x)
23b2ce53 2960 && GET_CODE (x) != SUBREG)
f8cfc6aa 2961 || (REG_P (other)
23b2ce53
RS
2962 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2963 || reg_mentioned_p (other, x))))
2964 {
2965 rtx temp = gen_reg_rtx (GET_MODE (x));
2966 emit_move_insn (temp, x);
2967 return temp;
2968 }
2969 return x;
2970}
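/* Editorial sketch (TARGET is hypothetical): a caller protects X before
   emitting code that clobbers OTHER, so the clobber cannot change what X
   denotes.  */
#if 0
  x = make_safe_from (x, target);
  emit_move_insn (target, const0_rtx);  /* may overwrite what X named */
  /* ... X remains usable here ... */
#endif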
2971\f
2972/* Emission of insns (adding them to the doubly-linked list). */
2973
23b2ce53
RS
2974/* Return the last insn emitted, even if it is in a sequence now pushed. */
2975
2976rtx
502b8322 2977get_last_insn_anywhere (void)
23b2ce53
RS
2978{
2979 struct sequence_stack *stack;
5936d944
JH
2980 if (get_last_insn ())
2981 return get_last_insn ();
49ad7cfa 2982 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2983 if (stack->last != 0)
2984 return stack->last;
2985 return 0;
2986}
2987
2a496e8b
JDA
2988/* Return the first nonnote insn emitted in current sequence or current
2989 function. This routine looks inside SEQUENCEs. */
2990
2991rtx
502b8322 2992get_first_nonnote_insn (void)
2a496e8b 2993{
5936d944 2994 rtx insn = get_insns ();
91373fe8
JDA
2995
2996 if (insn)
2997 {
2998 if (NOTE_P (insn))
2999 for (insn = next_insn (insn);
3000 insn && NOTE_P (insn);
3001 insn = next_insn (insn))
3002 continue;
3003 else
3004 {
2ca202e7 3005 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
3006 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3007 insn = XVECEXP (PATTERN (insn), 0, 0);
3008 }
3009 }
2a496e8b
JDA
3010
3011 return insn;
3012}
3013
3014/* Return the last nonnote insn emitted in current sequence or current
3015 function. This routine looks inside SEQUENCEs. */
3016
3017rtx
502b8322 3018get_last_nonnote_insn (void)
2a496e8b 3019{
5936d944 3020 rtx insn = get_last_insn ();
91373fe8
JDA
3021
3022 if (insn)
3023 {
3024 if (NOTE_P (insn))
3025 for (insn = previous_insn (insn);
3026 insn && NOTE_P (insn);
3027 insn = previous_insn (insn))
3028 continue;
3029 else
3030 {
2ca202e7 3031 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
3032 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3033 insn = XVECEXP (PATTERN (insn), 0,
3034 XVECLEN (PATTERN (insn), 0) - 1);
3035 }
3036 }
2a496e8b
JDA
3037
3038 return insn;
3039}
3040
b5b8b0ac
AO
3041/* Return the number of actual (non-debug) insns emitted in this
3042 function. */
3043
3044int
3045get_max_insn_count (void)
3046{
3047 int n = cur_insn_uid;
3048
3049 /* The table size must be stable across -g, to avoid codegen
3050 differences due to debug insns, and not be affected by
3051 -fmin-insn-uid, to avoid excessive table size and to simplify
3052 debugging of -fcompare-debug failures. */
3053 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3054 n -= cur_debug_insn_uid;
3055 else
3056 n -= MIN_NONDEBUG_INSN_UID;
3057
3058 return n;
3059}
3060
23b2ce53
RS
3061\f
3062/* Return the next insn. If it is a SEQUENCE, return the first insn
3063 of the sequence. */
3064
3065rtx
502b8322 3066next_insn (rtx insn)
23b2ce53 3067{
75547801
KG
3068 if (insn)
3069 {
3070 insn = NEXT_INSN (insn);
3071 if (insn && NONJUMP_INSN_P (insn)
3072 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3073 insn = XVECEXP (PATTERN (insn), 0, 0);
3074 }
23b2ce53 3075
75547801 3076 return insn;
23b2ce53
RS
3077}
3078
3079/* Return the previous insn. If it is a SEQUENCE, return the last insn
3080 of the sequence. */
3081
3082rtx
502b8322 3083previous_insn (rtx insn)
23b2ce53 3084{
75547801
KG
3085 if (insn)
3086 {
3087 insn = PREV_INSN (insn);
3088 if (insn && NONJUMP_INSN_P (insn)
3089 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3090 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3091 }
23b2ce53 3092
75547801 3093 return insn;
23b2ce53
RS
3094}
3095
3096/* Return the next insn after INSN that is not a NOTE. This routine does not
3097 look inside SEQUENCEs. */
3098
3099rtx
502b8322 3100next_nonnote_insn (rtx insn)
23b2ce53 3101{
75547801
KG
3102 while (insn)
3103 {
3104 insn = NEXT_INSN (insn);
3105 if (insn == 0 || !NOTE_P (insn))
3106 break;
3107 }
23b2ce53 3108
75547801 3109 return insn;
23b2ce53
RS
3110}
3111
1e211590
DD
3112/* Return the next insn after INSN that is not a NOTE, but stop the
3113 search before we enter another basic block. This routine does not
3114 look inside SEQUENCEs. */
3115
3116rtx
3117next_nonnote_insn_bb (rtx insn)
3118{
3119 while (insn)
3120 {
3121 insn = NEXT_INSN (insn);
3122 if (insn == 0 || !NOTE_P (insn))
3123 break;
3124 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3125 return NULL_RTX;
3126 }
3127
3128 return insn;
3129}
3130
23b2ce53
RS
3131/* Return the previous insn before INSN that is not a NOTE. This routine does
3132 not look inside SEQUENCEs. */
3133
3134rtx
502b8322 3135prev_nonnote_insn (rtx insn)
23b2ce53 3136{
75547801
KG
3137 while (insn)
3138 {
3139 insn = PREV_INSN (insn);
3140 if (insn == 0 || !NOTE_P (insn))
3141 break;
3142 }
23b2ce53 3143
75547801 3144 return insn;
23b2ce53
RS
3145}
3146
896aa4ea
DD
3147/* Return the previous insn before INSN that is not a NOTE, but stop
3148 the search before we enter another basic block. This routine does
3149 not look inside SEQUENCEs. */
3150
3151rtx
3152prev_nonnote_insn_bb (rtx insn)
3153{
3154 while (insn)
3155 {
3156 insn = PREV_INSN (insn);
3157 if (insn == 0 || !NOTE_P (insn))
3158 break;
3159 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3160 return NULL_RTX;
3161 }
3162
3163 return insn;
3164}
3165
b5b8b0ac
AO
3166/* Return the next insn after INSN that is not a DEBUG_INSN. This
3167 routine does not look inside SEQUENCEs. */
3168
3169rtx
3170next_nondebug_insn (rtx insn)
3171{
3172 while (insn)
3173 {
3174 insn = NEXT_INSN (insn);
3175 if (insn == 0 || !DEBUG_INSN_P (insn))
3176 break;
3177 }
3178
3179 return insn;
3180}
3181
3182/* Return the previous insn before INSN that is not a DEBUG_INSN.
3183 This routine does not look inside SEQUENCEs. */
3184
3185rtx
3186prev_nondebug_insn (rtx insn)
3187{
3188 while (insn)
3189 {
3190 insn = PREV_INSN (insn);
3191 if (insn == 0 || !DEBUG_INSN_P (insn))
3192 break;
3193 }
3194
3195 return insn;
3196}
3197
f0fc0803
JJ
3198/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3199 This routine does not look inside SEQUENCEs. */
3200
3201rtx
3202next_nonnote_nondebug_insn (rtx insn)
3203{
3204 while (insn)
3205 {
3206 insn = NEXT_INSN (insn);
3207 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3208 break;
3209 }
3210
3211 return insn;
3212}
3213
3214/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3215 This routine does not look inside SEQUENCEs. */
3216
3217rtx
3218prev_nonnote_nondebug_insn (rtx insn)
3219{
3220 while (insn)
3221 {
3222 insn = PREV_INSN (insn);
3223 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3224 break;
3225 }
3226
3227 return insn;
3228}
3229
23b2ce53
RS
3230/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3231 or 0, if there is none. This routine does not look inside
0f41302f 3232 SEQUENCEs. */
23b2ce53
RS
3233
3234rtx
502b8322 3235next_real_insn (rtx insn)
23b2ce53 3236{
75547801
KG
3237 while (insn)
3238 {
3239 insn = NEXT_INSN (insn);
3240 if (insn == 0 || INSN_P (insn))
3241 break;
3242 }
23b2ce53 3243
75547801 3244 return insn;
23b2ce53
RS
3245}
3246
3247/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3248 or 0, if there is none. This routine does not look inside
3249 SEQUENCEs. */
3250
3251rtx
502b8322 3252prev_real_insn (rtx insn)
23b2ce53 3253{
75547801
KG
3254 while (insn)
3255 {
3256 insn = PREV_INSN (insn);
3257 if (insn == 0 || INSN_P (insn))
3258 break;
3259 }
23b2ce53 3260
75547801 3261 return insn;
23b2ce53
RS
3262}
3263
ee960939
OH
3264/* Return the last CALL_INSN in the current list, or 0 if there is none.
3265 This routine does not look inside SEQUENCEs. */
3266
3267rtx
502b8322 3268last_call_insn (void)
ee960939
OH
3269{
3270 rtx insn;
3271
3272 for (insn = get_last_insn ();
4b4bf941 3273 insn && !CALL_P (insn);
ee960939
OH
3274 insn = PREV_INSN (insn))
3275 ;
3276
3277 return insn;
3278}
3279
23b2ce53 3280/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3281 does not look inside SEQUENCEs. After reload this also skips over
3282 standalone USE and CLOBBER insns. */
23b2ce53 3283
69732dcb 3284int
4f588890 3285active_insn_p (const_rtx insn)
69732dcb 3286{
4b4bf941
JQ
3287 return (CALL_P (insn) || JUMP_P (insn)
3288 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3289 && (! reload_completed
3290 || (GET_CODE (PATTERN (insn)) != USE
3291 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3292}
3293
23b2ce53 3294rtx
502b8322 3295next_active_insn (rtx insn)
23b2ce53 3296{
75547801
KG
3297 while (insn)
3298 {
3299 insn = NEXT_INSN (insn);
3300 if (insn == 0 || active_insn_p (insn))
3301 break;
3302 }
23b2ce53 3303
75547801 3304 return insn;
23b2ce53
RS
3305}
3306
3307/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3308 does not look inside SEQUENCEs. After reload this also skips over
3309 standalone USE and CLOBBER insns. */
23b2ce53
RS
3310
3311rtx
502b8322 3312prev_active_insn (rtx insn)
23b2ce53 3313{
75547801
KG
3314 while (insn)
3315 {
3316 insn = PREV_INSN (insn);
3317 if (insn == 0 || active_insn_p (insn))
3318 break;
3319 }
23b2ce53 3320
75547801 3321 return insn;
23b2ce53
RS
3322}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return LABEL
   itself if it is null or any return rtx.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  if (label && ANY_RETURN_P (label))
    return label;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
\f
#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return -1;
}
#endif

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx next, *p;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
         ? (after ? PREV_INSN (after) : get_last_insn ())
         : NEXT_INSN (before);
}
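
/* A sketch of the canonical call pattern, assuming INSN is an insn in
   the current chain (illustrative only):

	rtx last = try_split (PATTERN (insn), insn, 1);
	if (last != insn)
	  ... INSN was replaced; LAST is the last insn of the result ...

   Passing a nonzero LAST requests the final insn of the split sequence,
   exactly as the recursive call above does.  */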
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
\f
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = get_last_insn ();
  NEXT_INSN (insn) = 0;

  if (NULL != get_last_insn ())
    NEXT_INSN (get_last_insn ()) = insn;

  if (NULL == get_insns ())
    set_first_insn (insn);

  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (get_last_insn () == after)
    set_last_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from before.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (get_insns () == before)
    set_first_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
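
/* Minimal usage sketch for these low-level insertion primitives
   (illustrative only; PAT and WHERE are assumed valid):

	rtx insn = make_insn_raw (pat);
	add_insn_before (insn, where, NULL);

   In practice the emit_insn_before/emit_insn_after entry points below
   are preferred, since they also accept insn lists and maintain
   INSN_LOCATION.  */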


/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (NONDEBUG_INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
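
/* Sketch: moving INSN directly after AFTER using these primitives
   (illustrative only; both insns are assumed to be in the chain):

	remove_insn (insn);
	add_insn_after (insn, after, NULL);

   This is the same dance try_split performs above when it relocates
   NOTE_INSN_CALL_ARG_LOCATION notes.  */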

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
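
/* Sketch of a typical caller (illustrative only): the usage chain is a
   list of EXPR_LIST nodes whose elements are USE or CLOBBER rtxs, e.g.

	rtx u = gen_rtx_EXPR_LIST (VOIDmode,
				   gen_rtx_USE (VOIDmode, reg), NULL_RTX);
	add_function_usage_to (call_insn, u);

   where REG is assumed to be a valid hard register rtx.  */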

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
#ifdef ENABLE_CHECKING
  rtx x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx (*make_raw) (rtx))
{
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
4260 return note;
4261}
4262\f
2f937369
DM
4263/* Helper for emit_insn_after, handles lists of instructions
4264 efficiently. */
23b2ce53 4265
2f937369 4266static rtx
6fb5fa3c 4267emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4268{
2f937369
DM
4269 rtx last;
4270 rtx after_after;
6fb5fa3c
DB
4271 if (!bb && !BARRIER_P (after))
4272 bb = BLOCK_FOR_INSN (after);
23b2ce53 4273
6fb5fa3c 4274 if (bb)
23b2ce53 4275 {
6fb5fa3c 4276 df_set_bb_dirty (bb);
2f937369 4277 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4278 if (!BARRIER_P (last))
6fb5fa3c
DB
4279 {
4280 set_block_for_insn (last, bb);
4281 df_insn_rescan (last);
4282 }
4b4bf941 4283 if (!BARRIER_P (last))
6fb5fa3c
DB
4284 {
4285 set_block_for_insn (last, bb);
4286 df_insn_rescan (last);
4287 }
a813c111
SB
4288 if (BB_END (bb) == after)
4289 BB_END (bb) = last;
23b2ce53
RS
4290 }
4291 else
2f937369
DM
4292 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4293 continue;
4294
4295 after_after = NEXT_INSN (after);
4296
4297 NEXT_INSN (after) = first;
4298 PREV_INSN (first) = after;
4299 NEXT_INSN (last) = after_after;
4300 if (after_after)
4301 PREV_INSN (after_after) = last;
4302
5936d944
JH
4303 if (after == get_last_insn())
4304 set_last_insn (last);
e855c69d 4305
2f937369
DM
4306 return last;
4307}
4308
5f02387d
NF
4309static rtx
4310emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4311 rtx (*make_raw)(rtx))
2f937369
DM
4312{
4313 rtx last = after;
4314
5b0264cb 4315 gcc_assert (after);
2f937369
DM
4316
4317 if (x == NULL_RTX)
4318 return last;
4319
4320 switch (GET_CODE (x))
23b2ce53 4321 {
b5b8b0ac 4322 case DEBUG_INSN:
2f937369
DM
4323 case INSN:
4324 case JUMP_INSN:
4325 case CALL_INSN:
4326 case CODE_LABEL:
4327 case BARRIER:
4328 case NOTE:
6fb5fa3c 4329 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4330 break;
4331
4332#ifdef ENABLE_RTL_CHECKING
4333 case SEQUENCE:
5b0264cb 4334 gcc_unreachable ();
2f937369
DM
4335 break;
4336#endif
4337
4338 default:
5f02387d 4339 last = (*make_raw) (x);
6fb5fa3c 4340 add_insn_after (last, after, bb);
2f937369 4341 break;
23b2ce53
RS
4342 }
4343
2f937369 4344 return last;
23b2ce53
RS
4345}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx
emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
                           rtx (*make_raw) (rtx))
{
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx
emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
                    rtx (*make_raw) (rtx))
{
  rtx prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
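
/* For instance (illustrative only), to stamp newly emitted code with the
   location of an existing insn explicitly:

	emit_insn_after_setloc (pat, after, INSN_LOCATION (after));

   whereas the plain emit_*_after forms above derive the location from
   the nearest non-debug insn automatically.  */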

/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
                            rtx (*make_raw) (rtx))
{
  rtx first = PREV_INSN (before);
  rtx last = emit_pattern_before_noloc (pattern, before,
                                        insnp ? before : NULL_RTX,
                                        NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
                     bool insnp, rtx (*make_raw) (rtx))
{
  rtx next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
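
/* A minimal sketch of the common idiom (REG1 and REG2 are assumed to be
   valid register rtxs of the same mode):

	emit_insn (gen_rtx_SET (VOIDmode, reg1, reg2));

   The SET is not an insn list, so control falls through to
   make_insn_raw and a fresh INSN is appended to the chain.  */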

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
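
/* Because gen_clobber and gen_use wrap their emit_* counterparts in a
   sequence, the result can be inserted anywhere later, e.g.
   (illustrative only; REG and SOME_INSN are assumed valid):

	emit_insn_before (gen_use (reg), some_insn);  */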

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = single_set (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
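
/* For example (illustrative only; INSN is assumed to be a single-set
   insn that computes the value 42):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   records that SET_DEST of INSN equals (const_int 42) after INSN
   executes, updating any pre-existing REG_EQUAL note in place.  */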
\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
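
/* Sequences nest, so the canonical pattern is safe even while another
   sequence is open (illustrative sketch; REG and INSN are assumed
   valid):

	start_sequence ();
	emit_use (reg);
	seq = get_insns ();
	end_sequence ();
	emit_insn_before (seq, insn);

   This is exactly the structure gen_use above relies on.  */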
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
5214/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5215static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5216static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5217static int copy_insn_n_scratches;
5218
5219/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5220 copied an ASM_OPERANDS.
5221 In that case, it is the original input-operand vector. */
5222static rtvec orig_asm_operands_vector;
5223
5224/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5225 copied an ASM_OPERANDS.
5226 In that case, it is the copied input-operand vector. */
5227static rtvec copy_asm_operands_vector;
5228
5229/* Likewise for the constraints vector. */
5230static rtvec orig_asm_constraints_vector;
5231static rtvec copy_asm_constraints_vector;
5232
5233/* Recursively create a new copy of an rtx for copy_insn.
5234 This function differs from copy_rtx in that it handles SCRATCHes and
5235 ASM_OPERANDs properly.
5236 Normally, this function is not used directly; use copy_insn as front end.
5237 However, you could first copy an insn pattern with copy_insn and then use
5238 this function afterwards to properly copy any REG_NOTEs containing
5239 SCRATCHes. */
5240
5241rtx
502b8322 5242copy_insn_1 (rtx orig)
da43a810 5243{
b3694847
SS
5244 rtx copy;
5245 int i, j;
5246 RTX_CODE code;
5247 const char *format_ptr;
da43a810 5248
cd9c1ca8
RH
5249 if (orig == NULL)
5250 return NULL;
5251
da43a810
BS
5252 code = GET_CODE (orig);
5253
5254 switch (code)
5255 {
5256 case REG:
a52a87c3 5257 case DEBUG_EXPR:
d8116890 5258 CASE_CONST_ANY:
da43a810
BS
5259 case SYMBOL_REF:
5260 case CODE_LABEL:
5261 case PC:
5262 case CC0:
276e0224 5263 case RETURN:
26898771 5264 case SIMPLE_RETURN:
da43a810 5265 return orig;
3e89ed8d
JH
5266 case CLOBBER:
5267 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5268 return orig;
5269 break;
da43a810
BS
5270
5271 case SCRATCH:
5272 for (i = 0; i < copy_insn_n_scratches; i++)
5273 if (copy_insn_scratch_in[i] == orig)
5274 return copy_insn_scratch_out[i];
5275 break;
5276
5277 case CONST:
6fb5fa3c 5278 if (shared_const_p (orig))
da43a810
BS
5279 return orig;
5280 break;
750c9258 5281
da43a810
BS
5282 /* A MEM with a constant address is not sharable. The problem is that
5283 the constant address may need to be reloaded. If the mem is shared,
5284 then reloading one copy of this mem will cause all copies to appear
5285 to have been reloaded. */
5286
5287 default:
5288 break;
5289 }
5290
aacd3885
RS
5291 /* Copy the various flags, fields, and other information. We assume
5292 that all fields need copying, and then clear the fields that should
da43a810
BS
5293 not be copied. That is the sensible default behavior, and forces
5294 us to explicitly document why we are *not* copying a flag. */
aacd3885 5295 copy = shallow_copy_rtx (orig);
da43a810
BS
5296
5297 /* We do not copy the USED flag, which is used as a mark bit during
5298 walks over the RTL. */
2adc7f12 5299 RTX_FLAG (copy, used) = 0;
da43a810
BS
5300
5301 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 5302 if (INSN_P (orig))
da43a810 5303 {
2adc7f12
JJ
5304 RTX_FLAG (copy, jump) = 0;
5305 RTX_FLAG (copy, call) = 0;
5306 RTX_FLAG (copy, frame_related) = 0;
da43a810 5307 }
750c9258 5308
da43a810
BS
5309 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5310
5311 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
5312 switch (*format_ptr++)
5313 {
5314 case 'e':
5315 if (XEXP (orig, i) != NULL)
5316 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5317 break;
da43a810 5318
aacd3885
RS
5319 case 'E':
5320 case 'V':
5321 if (XVEC (orig, i) == orig_asm_constraints_vector)
5322 XVEC (copy, i) = copy_asm_constraints_vector;
5323 else if (XVEC (orig, i) == orig_asm_operands_vector)
5324 XVEC (copy, i) = copy_asm_operands_vector;
5325 else if (XVEC (orig, i) != NULL)
5326 {
5327 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5328 for (j = 0; j < XVECLEN (copy, i); j++)
5329 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5330 }
5331 break;
da43a810 5332
aacd3885
RS
5333 case 't':
5334 case 'w':
5335 case 'i':
5336 case 's':
5337 case 'S':
5338 case 'u':
5339 case '0':
5340 /* These are left unchanged. */
5341 break;
da43a810 5342
aacd3885
RS
5343 default:
5344 gcc_unreachable ();
5345 }
da43a810
BS
5346
5347 if (code == SCRATCH)
5348 {
5349 i = copy_insn_n_scratches++;
5b0264cb 5350 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
5351 copy_insn_scratch_in[i] = orig;
5352 copy_insn_scratch_out[i] = copy;
5353 }
5354 else if (code == ASM_OPERANDS)
5355 {
6462bb43
AO
5356 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5357 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5358 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5359 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
5360 }
5361
5362 return copy;
5363}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
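
/* For illustration (a sketch, not code used elsewhere in GCC): to
   duplicate both the pattern and the notes of an insn while keeping
   SCRATCHes consistent between the two, one could write

     rtx new_pat = copy_insn (PATTERN (insn));
     rtx new_notes = copy_insn_1 (REG_NOTES (insn));

   Because copy_insn_1 reuses the SCRATCH mapping filled in by the
   preceding copy_insn call, a SCRATCH that appears in both the pattern
   and a note is replaced by one and the same new rtx.  */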

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
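
/* For illustration (a sketch; assumes the target provides V4SImode,
   and X is a hypothetical rtx local): building a four-element vector
   of const0_rtx

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   yields the shared CONST0_RTX (V4SImode) object rather than a fresh
   CONST_VECTOR, so the result can be compared against CONST0_RTX with
   pointer equality.  */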

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (enum machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      FCONST1(mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      FCONST1(mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
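
/* Usage sketch (illustrative only; REGNO stands for some hard register
   number valid on the target, not code used elsewhere in this file):

     rtx clobber = gen_hard_reg_clobber (word_mode, regno);

   Repeated calls with the same mode and register return the same cached
   rtx, so callers must never modify the result in place.  */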

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   data structures are built lazily, only when insns at a given place are
   actually needed.  */
static location_t curr_location, last_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
  last_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
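
/* A sketch of how a target's atomic expansion might consult this
   predicate (illustrative only; `model' is the memory model operand of
   the atomic pattern being expanded):

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);
     ... emit the atomic operation itself ...
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);
*/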
\f
#include "gt-emit-rtl.h"