/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992-2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
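
/* Illustrative sketch (hypothetical, not compiled): GEN_INT takes the
   HOST_WIDE_INT value as-is, so callers that want a constant that is
   canonical for a narrow mode should go through gen_int_mode, which
   first sign-extends the value from the width of MODE.  */
#if 0
rtx wrong = GEN_INT (0xff);		/* (const_int 255): not canonical
					   as a QImode value.  */
rtx right = gen_int_mode (0xff, QImode); /* (const_int -1): shares the
					    cached constm1_rtx.  */
#endif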

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
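
/* Illustrative sketch (hypothetical, not compiled; assumes a 64-bit
   HOST_WIDE_INT host): one example of each of the three cases
   described in the comment above.  */
#if 0
rtx a = immed_double_const (42, 0, SImode);    /* Case 1: SImode fits in
						  a HOST_WIDE_INT, so this
						  is gen_int_mode (42,
						  SImode).  */
rtx b = immed_double_const (-7, -1, VOIDmode); /* Case 2: i1 is just the
						  sign extension of i0,
						  so (const_int -7).  */
rtx c = immed_double_const (0, 1, TImode);     /* Case 3: needs two words,
						  so a VOIDmode CONST_DOUBLE
						  with low 0, high 1.  */
#endif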

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
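
/* Illustrative sketch (hypothetical, not compiled; assumes a 32-bit
   little-endian target where word_mode is SImode): a few of the
   accept/reject decisions the rules above produce for pseudos.  */
#if 0
validate_subreg (SImode, DImode, NULL_RTX, 0); /* true: low word.  */
validate_subreg (SImode, DImode, NULL_RTX, 4); /* true: high word, still
						  a whole-word subreg.  */
validate_subreg (HImode, SImode, NULL_RTX, 2); /* false: not the lowpart
						  of a word-sized pseudo.  */
validate_subreg (HImode, SFmode, NULL_RTX, 0); /* false outside LRA:
						  float/int size change.  */
#endif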

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
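
/* Illustrative sketch (hypothetical, not compiled): both entry points
   build the same two-element vector; gen_rtvec is the varargs
   convenience wrapper, gen_rtvec_v takes an existing array.  */
#if 0
rtx ops[2] = { pc_rtx, const0_rtx };
rtvec v1 = gen_rtvec (2, pc_rtx, const0_rtx);
rtvec v2 = gen_rtvec_v (2, ops);
/* RTVEC_ELT (v1, 0) == RTVEC_ELT (v2, 0), and GET_NUM_ELEM gives 2
   for both.  */
#endif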
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
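
/* Illustrative sketch (hypothetical, not compiled; assumes a big-endian
   target): the SImode lowpart of a DImode value starts 4 bytes in, and
   the paradoxical direction is the negation of that.  On little-endian
   targets both results are 0.  */
#if 0
int off1 = byte_lowpart_offset (SImode, DImode); /* 4: SI lowpart of DI.  */
int off2 = byte_lowpart_offset (DImode, SImode); /* -4: paradoxical.  */
#endif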
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
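
/* Illustrative sketch (hypothetical, not compiled): requesting a
   complex-mode pseudo while generating_concat_p is set yields a CONCAT
   of two independent pseudos rather than one wide register.  */
#if 0
rtx r = gen_reg_rtx (SCmode);	/* (concat:SC (reg:SF n) (reg:SF n+1)),
				   so the real and imaginary parts can
				   be allocated noncontiguously.  */
rtx s = gen_reg_rtx (SImode);	/* A plain new pseudo: (reg:SI m).  */
#endif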

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
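
/* Illustrative sketch (hypothetical, not compiled): lowparts of
   extensions collapse back to the unextended object when the modes
   line up.  */
#if 0
rtx r = gen_reg_rtx (HImode);
rtx x = gen_rtx_ZERO_EXTEND (SImode, r);
rtx lo = gen_lowpart_common (HImode, x); /* Just R again: the HImode
					    lowpart of (zero_extend:SI
					    (reg:HI)) is the register
					    itself.  */
#endif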
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
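
/* Illustrative sketch (hypothetical, not compiled; assumes a 32-bit
   little-endian target): the low SImode word of a DImode value is at
   byte 0 and the high word at byte 4; big-endian targets get the
   mirror image.  */
#if 0
unsigned int lo = subreg_lowpart_offset (SImode, DImode);  /* 0 (LE).  */
unsigned int hi = subreg_highpart_offset (SImode, DImode); /* 4 (LE).  */
#endif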

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
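
/* Illustrative sketch (hypothetical, not compiled; assumes a 32-bit
   target): a paradoxical subreg reads a wider mode from a narrower
   register, so its offset is always zero and it is still its own
   lowpart.  */
#if 0
rtx r = gen_reg_rtx (HImode);
rtx sub = gen_rtx_SUBREG (SImode, r, 0); /* (subreg:SI (reg:HI) 0).  */
/* paradoxical_subreg_p (sub) and subreg_lowpart_p (sub) both hold;
   the bits beyond HImode are undefined.  */
#endif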
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
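
/* Illustrative sketch (hypothetical, not compiled; assumes a 32-bit
   little-endian target): pulling the two SImode-sized words out of a
   DImode pseudo.  */
#if 0
rtx d = gen_reg_rtx (DImode);
rtx w0 = operand_subword_force (d, 0, DImode); /* Low word:
						  (subreg:SI (reg:DI) 0).  */
rtx w1 = operand_subword_force (d, 1, DImode); /* High word:
						  (subreg:SI (reg:DI) 4).  */
#endif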
1461\f
2b3493c8
AK
1462/* Returns 1 if both MEM_EXPR can be considered equal
1463 and 0 otherwise. */
1464
1465int
4f588890 1466mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1467{
1468 if (expr1 == expr2)
1469 return 1;
1470
1471 if (! expr1 || ! expr2)
1472 return 0;
1473
1474 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1475 return 0;
1476
55b34b5f 1477 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1478}
1479
805903b5
JJ
1480/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1481 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1482 -1 if not known. */
1483
1484int
d9223014 1485get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1486{
1487 tree expr;
1488 unsigned HOST_WIDE_INT offset;
1489
1490 /* This function can't use
527210c4 1491 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1492 || (MAX (MEM_ALIGN (mem),
0eb77834 1493 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1494 < align))
1495 return -1;
1496 else
527210c4 1497 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1498 for two reasons:
1499 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1500 for <variable>. get_inner_reference doesn't handle it and
1501 even if it did, the alignment in that case needs to be determined
1502 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1503 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1504 isn't sufficiently aligned, the object it is in might be. */
1505 gcc_assert (MEM_P (mem));
1506 expr = MEM_EXPR (mem);
527210c4 1507 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1508 return -1;
1509
527210c4 1510 offset = MEM_OFFSET (mem);
805903b5
JJ
1511 if (DECL_P (expr))
1512 {
1513 if (DECL_ALIGN (expr) < align)
1514 return -1;
1515 }
1516 else if (INDIRECT_REF_P (expr))
1517 {
1518 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1519 return -1;
1520 }
1521 else if (TREE_CODE (expr) == COMPONENT_REF)
1522 {
1523 while (1)
1524 {
1525 tree inner = TREE_OPERAND (expr, 0);
1526 tree field = TREE_OPERAND (expr, 1);
1527 tree byte_offset = component_ref_field_offset (expr);
1528 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1529
1530 if (!byte_offset
1531 || !host_integerp (byte_offset, 1)
1532 || !host_integerp (bit_offset, 1))
1533 return -1;
1534
1535 offset += tree_low_cst (byte_offset, 1);
1536 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1537
1538 if (inner == NULL_TREE)
1539 {
1540 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1541 < (unsigned int) align)
1542 return -1;
1543 break;
1544 }
1545 else if (DECL_P (inner))
1546 {
1547 if (DECL_ALIGN (inner) < align)
1548 return -1;
1549 break;
1550 }
1551 else if (TREE_CODE (inner) != COMPONENT_REF)
1552 return -1;
1553 expr = inner;
1554 }
1555 }
1556 else
1557 return -1;
1558
1559 return offset & ((align / BITS_PER_UNIT) - 1);
1560}
1561
6926c713 1562/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1563 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1564 if we are making a new object of this type. BITPOS is nonzero if
1565 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1566
1567void
502b8322
AJ
1568set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1569 HOST_WIDE_INT bitpos)
173b24b9 1570{
6f1087be 1571 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1572 tree type;
f12144dd 1573 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1574 addr_space_t as;
173b24b9
RK
1575
1576 /* It can happen that type_for_mode was given a mode for which there
1577 is no language-level type. In which case it returns NULL, which
1578 we can see here. */
1579 if (t == NULL_TREE)
1580 return;
1581
1582 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1583 if (type == error_mark_node)
1584 return;
173b24b9 1585
173b24b9
RK
1586 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1587 wrong answer, as it assumes that DECL_RTL already has the right alias
1588 info. Callers should not set DECL_RTL until after the call to
1589 set_mem_attributes. */
5b0264cb 1590 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1591
f12144dd
RS
1592 memset (&attrs, 0, sizeof (attrs));
1593
738cc472 1594 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1595 front-end routine) and use it. */
f12144dd 1596 attrs.alias = get_alias_set (t);
173b24b9 1597
a5e9c810 1598 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1599 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1600
268f7033 1601 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1602 refattrs = MEM_ATTRS (ref);
1603 if (refattrs)
268f7033
UW
1604 {
1605 /* ??? Can this ever happen? Calling this routine on a MEM that
1606 already carries memory attributes should probably be invalid. */
f12144dd 1607 attrs.expr = refattrs->expr;
754c3d5d 1608 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1609 attrs.offset = refattrs->offset;
754c3d5d 1610 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1611 attrs.size = refattrs->size;
1612 attrs.align = refattrs->align;
268f7033
UW
1613 }
1614
1615 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1616 else
268f7033 1617 {
f12144dd
RS
1618 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1619 gcc_assert (!defattrs->expr);
754c3d5d 1620 gcc_assert (!defattrs->offset_known_p);
f12144dd 1621
268f7033 1622 /* Respect mode size. */
754c3d5d 1623 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1624 attrs.size = defattrs->size;
268f7033
UW
1625 /* ??? Is this really necessary? We probably should always get
1626 the size from the type below. */
1627
1628 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1629 if T is an object, always compute the object alignment below. */
f12144dd
RS
1630 if (TYPE_P (t))
1631 attrs.align = defattrs->align;
1632 else
1633 attrs.align = BITS_PER_UNIT;
268f7033
UW
1634 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1635 e.g. if the type carries an alignment attribute. Should we be
1636 able to simply always use TYPE_ALIGN? */
1637 }
1638
c3d32120
RK
1639 /* We can set the alignment from the type if we are making an object,
1640 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
a80903ff 1641 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1642 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1643
70f34814
RG
1644 else if (TREE_CODE (t) == MEM_REF)
1645 {
a80903ff 1646 tree op0 = TREE_OPERAND (t, 0);
3e32c761
RG
1647 if (TREE_CODE (op0) == ADDR_EXPR
1648 && (DECL_P (TREE_OPERAND (op0, 0))
1649 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
70f34814 1650 {
3e32c761 1651 if (DECL_P (TREE_OPERAND (op0, 0)))
f12144dd 1652 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
3e32c761
RG
1653 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1654 {
f12144dd 1655 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
70f34814 1656#ifdef CONSTANT_ALIGNMENT
f12144dd
RS
1657 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1658 attrs.align);
70f34814 1659#endif
3e32c761
RG
1660 }
1661 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1662 {
1663 unsigned HOST_WIDE_INT ioff
1664 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1665 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd 1666 attrs.align = MIN (aoff, attrs.align);
3e32c761 1667 }
70f34814
RG
1668 }
1669 else
5951297a
EB
1670 /* ??? This isn't fully correct; we can't set the alignment from the
1671 type in all cases. */
f12144dd 1672 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
70f34814 1673 }
a80903ff 1674
9407f6bc
RG
1675 else if (TREE_CODE (t) == TARGET_MEM_REF)
1676 /* ??? This isn't fully correct; we can't set the alignment from the
1677 type in all cases. */
f12144dd 1678 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
9407f6bc 1679
738cc472 1680 /* If the size is known, we can set that. */
a787ccc3 1681 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1682
80965c18
RK
1683 /* If T is not a type, we may be able to deduce some more information about
1684 the expression. */
1685 if (! TYPE_P (t))
8ac61af7 1686 {
8476af98 1687 tree base;
df96b059 1688 bool align_computed = false;
389fdba0 1689
8ac61af7
RK
1690 if (TREE_THIS_VOLATILE (t))
1691 MEM_VOLATILE_P (ref) = 1;
173b24b9 1692
c56e3582
RK
1693 /* Now remove any conversions: they don't change what the underlying
1694 object is. Likewise for SAVE_EXPR. */
1043771b 1695 while (CONVERT_EXPR_P (t)
c56e3582
RK
1696 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1697 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1698 t = TREE_OPERAND (t, 0);
1699
4994da65
RG
1700 /* Note whether this expression can trap. */
1701 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1702
1703 base = get_base_address (t);
f18a7b25
MJ
1704 if (base)
1705 {
1706 if (DECL_P (base)
1707 && TREE_READONLY (base)
1708 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1709 && !TREE_THIS_VOLATILE (base))
1710 MEM_READONLY_P (ref) = 1;
1711
1712 /* Mark static const strings readonly as well. */
1713 if (TREE_CODE (base) == STRING_CST
1714 && TREE_READONLY (base)
1715 && TREE_STATIC (base))
1716 MEM_READONLY_P (ref) = 1;
1717
1718 if (TREE_CODE (base) == MEM_REF
1719 || TREE_CODE (base) == TARGET_MEM_REF)
1720 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1721 0))));
1722 else
1723 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1724 }
1725 else
1726 as = TYPE_ADDR_SPACE (type);
ba30e50d 1727
2039d7aa
RH
1728 /* If this expression uses its parent's alias set, mark it such
1729 that we won't change it. */
1730 if (component_uses_parent_alias_set (t))
10b76d73
RK
1731 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1732
8ac61af7
RK
1733 /* If this is a decl, set the attributes of the MEM from it. */
1734 if (DECL_P (t))
1735 {
f12144dd 1736 attrs.expr = t;
754c3d5d
RS
1737 attrs.offset_known_p = true;
1738 attrs.offset = 0;
6f1087be 1739 apply_bitpos = bitpos;
a787ccc3 1740 new_size = DECL_SIZE_UNIT (t);
f12144dd 1741 attrs.align = DECL_ALIGN (t);
df96b059 1742 align_computed = true;
8ac61af7
RK
1743 }
1744
40c0668b 1745 /* If this is a constant, we know the alignment. */
6615c446 1746 else if (CONSTANT_CLASS_P (t))
9ddfb1a7 1747 {
f12144dd 1748 attrs.align = TYPE_ALIGN (type);
9ddfb1a7 1749#ifdef CONSTANT_ALIGNMENT
f12144dd 1750 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
9ddfb1a7 1751#endif
df96b059 1752 align_computed = true;
9ddfb1a7 1753 }
998d7deb 1754
a787ccc3
RS
1755 /* If this is a field reference, record it. */
1756 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1757 {
f12144dd 1758 attrs.expr = t;
754c3d5d
RS
1759 attrs.offset_known_p = true;
1760 attrs.offset = 0;
6f1087be 1761 apply_bitpos = bitpos;
a787ccc3
RS
1762 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1763 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1764 }
1765
1766 /* If this is an array reference, look for an outer field reference. */
1767 else if (TREE_CODE (t) == ARRAY_REF)
1768 {
1769 tree off_tree = size_zero_node;
1b1838b6
JW
1770 /* We can't modify t, because we use it at the end of the
1771 function. */
1772 tree t2 = t;
998d7deb
RH
1773
1774 do
1775 {
1b1838b6 1776 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1777 tree low_bound = array_ref_low_bound (t2);
1778 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1779
1780 /* We assume all arrays have sizes that are a multiple of a byte.
1781 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1782 index, then convert to sizetype and multiply by the size of
1783 the array element. */
1784 if (! integer_zerop (low_bound))
4845b383
KH
1785 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1786 index, low_bound);
2567406a 1787
44de5aeb 1788 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1789 size_binop (MULT_EXPR,
1790 fold_convert (sizetype,
1791 index),
44de5aeb
RK
1792 unit_size),
1793 off_tree);
1b1838b6 1794 t2 = TREE_OPERAND (t2, 0);
998d7deb 1795 }
1b1838b6 1796 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1797
1b1838b6 1798 if (DECL_P (t2))
c67a1cf6 1799 {
f12144dd 1800 attrs.expr = t2;
754c3d5d 1801 attrs.offset_known_p = false;
c67a1cf6 1802 if (host_integerp (off_tree, 1))
40cb04f1
RH
1803 {
1804 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1805 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd
RS
1806 attrs.align = DECL_ALIGN (t2);
1807 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1808 attrs.align = aoff;
df96b059 1809 align_computed = true;
754c3d5d
RS
1810 attrs.offset_known_p = true;
1811 attrs.offset = ioff;
6f1087be 1812 apply_bitpos = bitpos;
40cb04f1 1813 }
c67a1cf6 1814 }
1b1838b6 1815 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1816 {
f12144dd 1817 attrs.expr = t2;
754c3d5d 1818 attrs.offset_known_p = false;
998d7deb 1819 if (host_integerp (off_tree, 1))
6f1087be 1820 {
754c3d5d
RS
1821 attrs.offset_known_p = true;
1822 attrs.offset = tree_low_cst (off_tree, 1);
6f1087be
RH
1823 apply_bitpos = bitpos;
1824 }
998d7deb
RH
1825 /* ??? Any reason the field size would be different from
1826 the size we got from the type? */
1827 }
c67a1cf6
RH
1828 }
1829
56c47f22 1830 /* If this is an indirect reference, record it. */
70f34814 1831 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1832 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1833 {
f12144dd 1834 attrs.expr = t;
754c3d5d
RS
1835 attrs.offset_known_p = true;
1836 attrs.offset = 0;
56c47f22
RG
1837 apply_bitpos = bitpos;
1838 }
1839
0eb77834 1840 if (!align_computed)
df96b059 1841 {
0eb77834 1842 unsigned int obj_align = get_object_alignment (t);
f12144dd 1843 attrs.align = MAX (attrs.align, obj_align);
df96b059 1844 }
8ac61af7 1845 }
f18a7b25
MJ
1846 else
1847 as = TYPE_ADDR_SPACE (type);
8ac61af7 1848
a787ccc3
RS
1849 if (host_integerp (new_size, 1))
1850 {
1851 attrs.size_known_p = true;
1852 attrs.size = tree_low_cst (new_size, 1);
1853 }
1854
15c812e3 1855 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1856 bit position offset. Similarly, increase the size of the accessed
1857 object to contain the negative offset. */
6f1087be 1858 if (apply_bitpos)
8c317c5f 1859 {
754c3d5d
RS
1860 gcc_assert (attrs.offset_known_p);
1861 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1862 if (attrs.size_known_p)
1863 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1864 }
6f1087be 1865
8ac61af7 1866 /* Now set the attributes we computed above. */
f18a7b25 1867 attrs.addrspace = as;
f12144dd 1868 set_mem_attrs (ref, &attrs);
173b24b9
RK
1869}
1870
6f1087be 1871void
502b8322 1872set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1873{
1874 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1875}
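
/* Usage sketch (illustrative; DECL and ADDR are placeholders): a typical
   caller builds a MEM for a declaration and then records its attributes:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   The final argument of 1 says a new object of DECL's type is being made;
   afterwards MEM_EXPR, MEM_OFFSET, MEM_SIZE, MEM_ALIGN and the alias set
   of MEM describe DECL.  */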
1876
173b24b9
RK
1877/* Set the alias set of MEM to SET. */
1878
1879void
4862826d 1880set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1881{
f12144dd
RS
1882 struct mem_attrs attrs;
1883
173b24b9 1884 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1885 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
1886 attrs = *get_mem_attrs (mem);
1887 attrs.alias = set;
1888 set_mem_attrs (mem, &attrs);
09e881c9
BE
1889}
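
/* Usage sketch (illustrative): give a freshly built MEM a distinct alias
   set so it is known not to conflict with other memory:

     set_mem_alias_set (mem, new_alias_set ());  */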
1890
1891/* Set the address space of MEM to ADDRSPACE (target-defined). */
1892
1893void
1894set_mem_addr_space (rtx mem, addr_space_t addrspace)
1895{
f12144dd
RS
1896 struct mem_attrs attrs;
1897
1898 attrs = *get_mem_attrs (mem);
1899 attrs.addrspace = addrspace;
1900 set_mem_attrs (mem, &attrs);
173b24b9 1901}
738cc472 1902
d022d93e 1903/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1904
1905void
502b8322 1906set_mem_align (rtx mem, unsigned int align)
738cc472 1907{
f12144dd
RS
1908 struct mem_attrs attrs;
1909
1910 attrs = *get_mem_attrs (mem);
1911 attrs.align = align;
1912 set_mem_attrs (mem, &attrs);
738cc472 1913}
1285011e 1914
998d7deb 1915/* Set the expr for MEM to EXPR. */
1285011e
RK
1916
1917void
502b8322 1918set_mem_expr (rtx mem, tree expr)
1285011e 1919{
f12144dd
RS
1920 struct mem_attrs attrs;
1921
1922 attrs = *get_mem_attrs (mem);
1923 attrs.expr = expr;
1924 set_mem_attrs (mem, &attrs);
1285011e 1925}
998d7deb
RH
1926
1927/* Set the offset of MEM to OFFSET. */
1928
1929void
527210c4 1930set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 1931{
f12144dd
RS
1932 struct mem_attrs attrs;
1933
1934 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1935 attrs.offset_known_p = true;
1936 attrs.offset = offset;
527210c4
RS
1937 set_mem_attrs (mem, &attrs);
1938}
1939
1940/* Clear the offset of MEM. */
1941
1942void
1943clear_mem_offset (rtx mem)
1944{
1945 struct mem_attrs attrs;
1946
1947 attrs = *get_mem_attrs (mem);
754c3d5d 1948 attrs.offset_known_p = false;
f12144dd 1949 set_mem_attrs (mem, &attrs);
35aff10b
AM
1950}
1951
1952/* Set the size of MEM to SIZE. */
1953
1954void
f5541398 1955set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 1956{
f12144dd
RS
1957 struct mem_attrs attrs;
1958
1959 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1960 attrs.size_known_p = true;
1961 attrs.size = size;
f5541398
RS
1962 set_mem_attrs (mem, &attrs);
1963}
1964
1965/* Clear the size of MEM. */
1966
1967void
1968clear_mem_size (rtx mem)
1969{
1970 struct mem_attrs attrs;
1971
1972 attrs = *get_mem_attrs (mem);
754c3d5d 1973 attrs.size_known_p = false;
f12144dd 1974 set_mem_attrs (mem, &attrs);
998d7deb 1975}
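
/* Usage sketch for the accessor family above (illustrative; ADDR is a
   placeholder).  Alignment is given in bits, size and offset in bytes:

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);
     set_mem_size (mem, 4);
     set_mem_offset (mem, 0);
     clear_mem_offset (mem);

   After the last call the offset is unknown again.  */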
173b24b9 1976\f
738cc472
RK
1977/* Return a memory reference like MEMREF, but with its mode changed to MODE
1978 and its address changed to ADDR. (VOIDmode means don't change the mode.
1979 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1980 returned memory location is required to be valid. The memory
1981 attributes are not changed. */
23b2ce53 1982
738cc472 1983static rtx
502b8322 1984change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1985{
09e881c9 1986 addr_space_t as;
60564289 1987 rtx new_rtx;
23b2ce53 1988
5b0264cb 1989 gcc_assert (MEM_P (memref));
09e881c9 1990 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
1991 if (mode == VOIDmode)
1992 mode = GET_MODE (memref);
1993 if (addr == 0)
1994 addr = XEXP (memref, 0);
a74ff877 1995 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 1996 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 1997 return memref;
23b2ce53 1998
f1ec5147 1999 if (validate)
23b2ce53 2000 {
f1ec5147 2001 if (reload_in_progress || reload_completed)
09e881c9 2002 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2003 else
09e881c9 2004 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2005 }
750c9258 2006
9b04c6a8
RK
2007 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2008 return memref;
2009
60564289
KG
2010 new_rtx = gen_rtx_MEM (mode, addr);
2011 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2012 return new_rtx;
23b2ce53 2013}
792760b9 2014
738cc472
RK
2015/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2016 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2017
2018rtx
502b8322 2019change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 2020{
f12144dd 2021 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
60564289 2022 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2023 struct mem_attrs attrs, *defattrs;
4e44c1ef 2024
f12144dd
RS
2025 attrs = *get_mem_attrs (memref);
2026 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2027 attrs.expr = NULL_TREE;
2028 attrs.offset_known_p = false;
2029 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2030 attrs.size = defattrs->size;
2031 attrs.align = defattrs->align;
c2f7bcc3 2032
fdb1c7b3 2033 /* If there are no changes, just return the original memory reference. */
60564289 2034 if (new_rtx == memref)
4e44c1ef 2035 {
f12144dd 2036 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2037 return new_rtx;
4e44c1ef 2038
60564289
KG
2039 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2040 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2041 }
fdb1c7b3 2042
f12144dd 2043 set_mem_attrs (new_rtx, &attrs);
60564289 2044 return new_rtx;
f4ef873c 2045}
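
/* Usage sketch (illustrative; NEW_ADDR is a placeholder): re-express an
   access through a different address when nothing further is known about
   the new location:

     rtx new_mem = change_address (mem, SImode, new_addr);

   Only the alias set of MEM survives; the expression and offset are
   dropped, and size/alignment revert to the SImode defaults.  */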
792760b9 2046
738cc472
RK
2047/* Return a memory reference like MEMREF, but with its mode changed
2048 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2049 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2050 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2051 and the caller is responsible for adjusting the MEMREF base register.
2052 If ADJUST_OBJECT is zero, the underlying object associated with the
2053 memory reference is left unchanged and the caller is responsible for
2054 dealing with it. Otherwise, if the new memory reference is outside
5f2cbd0d
RS
2055 the underlying object, even partially, then the object is dropped.
2056 SIZE, if nonzero, is the size of an access in cases where MODE
2057 has no inherent size. */
f1ec5147
RK
2058
2059rtx
502b8322 2060adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
5f2cbd0d
RS
2061 int validate, int adjust_address, int adjust_object,
2062 HOST_WIDE_INT size)
f1ec5147 2063{
823e3574 2064 rtx addr = XEXP (memref, 0);
60564289 2065 rtx new_rtx;
f12144dd 2066 enum machine_mode address_mode;
a6fe9ed4 2067 int pbits;
0207fa90 2068 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2069 unsigned HOST_WIDE_INT max_align;
0207fa90
EB
2070#ifdef POINTERS_EXTEND_UNSIGNED
2071 enum machine_mode pointer_mode
2072 = targetm.addr_space.pointer_mode (attrs.addrspace);
2073#endif
823e3574 2074
5f2cbd0d
RS
2075 /* Take the size of non-BLKmode accesses from the mode. */
2076 defattrs = mode_mem_attrs[(int) mode];
2077 if (defattrs->size_known_p)
2078 size = defattrs->size;
2079
fdb1c7b3
JH
2080 /* If there are no changes, just return the original memory reference. */
2081 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2082 && (size == 0 || (attrs.size_known_p && attrs.size == size))
f12144dd
RS
2083 && (!validate || memory_address_addr_space_p (mode, addr,
2084 attrs.addrspace)))
fdb1c7b3
JH
2085 return memref;
2086
d14419e4 2087 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2088 This may happen even if offset is nonzero -- consider
d14419e4
RH
2089 (plus (plus reg reg) const_int) -- so do this always. */
2090 addr = copy_rtx (addr);
2091
a6fe9ed4
JM
2092 /* Convert a possibly large offset to a signed value within the
2093 range of the target address space. */
372d6395 2094 address_mode = get_address_mode (memref);
d4ebfa65 2095 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2096 if (HOST_BITS_PER_WIDE_INT > pbits)
2097 {
2098 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2099 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2100 >> shift);
2101 }
2102
5ef0b50d 2103 if (adjust_address)
4a78c787
RH
2104 {
2105 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2106 object, we can merge it into the LO_SUM. */
2107 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2108 && offset >= 0
2109 && (unsigned HOST_WIDE_INT) offset
2110 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2111 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2112 plus_constant (address_mode,
2113 XEXP (addr, 1), offset));
0207fa90
EB
2114#ifdef POINTERS_EXTEND_UNSIGNED
2115 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2116 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2117 the fact that pointers are not allowed to overflow. */
2118 else if (POINTERS_EXTEND_UNSIGNED > 0
2119 && GET_CODE (addr) == ZERO_EXTEND
2120 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2121 && trunc_int_for_mode (offset, pointer_mode) == offset)
2122 addr = gen_rtx_ZERO_EXTEND (address_mode,
2123 plus_constant (pointer_mode,
2124 XEXP (addr, 0), offset));
2125#endif
4a78c787 2126 else
0a81f074 2127 addr = plus_constant (address_mode, addr, offset);
4a78c787 2128 }
823e3574 2129
60564289 2130 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472 2131
09efeca1
PB
2132 /* If the address is a REG, change_address_1 rightfully returns memref,
2133 but this would destroy memref's MEM_ATTRS. */
2134 if (new_rtx == memref && offset != 0)
2135 new_rtx = copy_rtx (new_rtx);
2136
5ef0b50d
EB
2137 /* Conservatively drop the object if we don't know where we start from. */
2138 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2139 {
2140 attrs.expr = NULL_TREE;
2141 attrs.alias = 0;
2142 }
2143
738cc472
RK
2144 /* Compute the new values of the memory attributes due to this adjustment.
2145 We add the offsets and update the alignment. */
754c3d5d 2146 if (attrs.offset_known_p)
5ef0b50d
EB
2147 {
2148 attrs.offset += offset;
2149
2150 /* Drop the object if the new left end is not within its bounds. */
2151 if (adjust_object && attrs.offset < 0)
2152 {
2153 attrs.expr = NULL_TREE;
2154 attrs.alias = 0;
2155 }
2156 }
738cc472 2157
03bf2c23
RK
2158 /* Compute the new alignment by taking the MIN of the alignment and the
2159 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2160 is zero. */
2161 if (offset != 0)
f12144dd
RS
2162 {
2163 max_align = (offset & -offset) * BITS_PER_UNIT;
2164 attrs.align = MIN (attrs.align, max_align);
2165 }
738cc472 2166
5f2cbd0d 2167 if (size)
754c3d5d 2168 {
5ef0b50d 2169 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2170 if (adjust_object && (offset + size) > attrs.size)
5ef0b50d
EB
2171 {
2172 attrs.expr = NULL_TREE;
2173 attrs.alias = 0;
2174 }
754c3d5d 2175 attrs.size_known_p = true;
5f2cbd0d 2176 attrs.size = size;
754c3d5d
RS
2177 }
2178 else if (attrs.size_known_p)
5ef0b50d 2179 {
5f2cbd0d 2180 gcc_assert (!adjust_object);
5ef0b50d 2181 attrs.size -= offset;
5f2cbd0d
RS
2182 /* ??? The store_by_pieces machinery generates negative sizes,
2183 so don't assert for that here. */
5ef0b50d 2184 }
10b76d73 2185
f12144dd 2186 set_mem_attrs (new_rtx, &attrs);
738cc472 2187
60564289 2188 return new_rtx;
f1ec5147
RK
2189}
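
/* Usage sketch (illustrative; adjust_address and adjust_address_nv are
   the usual wrapper macros around this function, defined elsewhere):

     rtx word1 = adjust_address (mem, SImode, 4);

   This forms a validated SImode reference at MEM + 4; MEM_OFFSET,
   MEM_SIZE and MEM_ALIGN are updated to match, e.g. the offset of 4
   caps the recorded alignment at 32 bits.  */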
2190
630036c6
JJ
2191/* Return a memory reference like MEMREF, but with its mode changed
2192 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2193 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2194 nonzero, the memory address is forced to be valid. */
2195
2196rtx
502b8322
AJ
2197adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2198 HOST_WIDE_INT offset, int validate)
630036c6
JJ
2199{
2200 memref = change_address_1 (memref, VOIDmode, addr, validate);
5f2cbd0d 2201 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
630036c6
JJ
2202}
2203
8ac61af7
RK
2204/* Return a memory reference like MEMREF, but whose address is changed by
2205 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2206 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2207
2208rtx
502b8322 2209offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2210{
60564289 2211 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2212 enum machine_mode address_mode;
754c3d5d 2213 struct mem_attrs attrs, *defattrs;
e3c8ea67 2214
f12144dd 2215 attrs = *get_mem_attrs (memref);
372d6395 2216 address_mode = get_address_mode (memref);
d4ebfa65 2217 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2218
68252e27 2219 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2220 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2221
2222 However, if we did go and rearrange things, we can wind up not
2223 being able to recognize the magic around pic_offset_table_rtx.
2224 This stuff is fragile, and is yet another example of why it is
2225 bad to expose PIC machinery too early. */
f12144dd
RS
2226 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2227 attrs.addrspace)
e3c8ea67
RH
2228 && GET_CODE (addr) == PLUS
2229 && XEXP (addr, 0) == pic_offset_table_rtx)
2230 {
2231 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2232 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2233 }
2234
60564289
KG
2235 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2236 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2237
fdb1c7b3 2238 /* If there are no changes, just return the original memory reference. */
60564289
KG
2239 if (new_rtx == memref)
2240 return new_rtx;
fdb1c7b3 2241
0d4903b8
RK
2242 /* Update the alignment to reflect the offset. Reset the offset, which
2243 we don't know. */
754c3d5d
RS
2244 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2245 attrs.offset_known_p = false;
2246 attrs.size_known_p = defattrs->size_known_p;
2247 attrs.size = defattrs->size;
f12144dd
RS
2248 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2249 set_mem_attrs (new_rtx, &attrs);
60564289 2250 return new_rtx;
0d4903b8 2251}
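
/* Usage sketch (illustrative; INDEX_RTX is a placeholder): add a variable
   index known to be a multiple of 4 bytes to the address of MEM:

     rtx new_mem = offset_address (mem, index_rtx, 4);

   The offset becomes unknown and the recorded alignment is capped at
   4 * BITS_PER_UNIT.  */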
68252e27 2252
792760b9
RK
2253/* Return a memory reference like MEMREF, but with its address changed to
2254 ADDR. The caller is asserting that the actual piece of memory pointed
2255 to is the same, just the form of the address is being changed, such as
2256 by putting something into a register. */
2257
2258rtx
502b8322 2259replace_equiv_address (rtx memref, rtx addr)
792760b9 2260{
738cc472
RK
2261 /* change_address_1 copies the memory attribute structure without change
2262 and that's exactly what we want here. */
40c0668b 2263 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2264 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2265}
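
/* Usage sketch (illustrative): the address is merely being put into a
   register, so every memory attribute is preserved:

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));  */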
738cc472 2266
f1ec5147
RK
2267/* Likewise, but the reference is not required to be valid. */
2268
2269rtx
502b8322 2270replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2271{
f1ec5147
RK
2272 return change_address_1 (memref, VOIDmode, addr, 0);
2273}
e7dfe4bb
RH
2274
2275/* Return a memory reference like MEMREF, but with its mode widened to
2276 MODE and offset by OFFSET. This would be used by targets that e.g.
2277 cannot issue QImode memory operations and have to use SImode memory
2278 operations plus masking logic. */
2279
2280rtx
502b8322 2281widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2282{
5f2cbd0d 2283 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2284 struct mem_attrs attrs;
e7dfe4bb
RH
2285 unsigned int size = GET_MODE_SIZE (mode);
2286
fdb1c7b3 2287 /* If there are no changes, just return the original memory reference. */
60564289
KG
2288 if (new_rtx == memref)
2289 return new_rtx;
fdb1c7b3 2290
f12144dd
RS
2291 attrs = *get_mem_attrs (new_rtx);
2292
e7dfe4bb
RH
2293 /* If we don't know what offset we were at within the expression, then
2294 we can't know if we've overstepped the bounds. */
754c3d5d 2295 if (! attrs.offset_known_p)
f12144dd 2296 attrs.expr = NULL_TREE;
e7dfe4bb 2297
f12144dd 2298 while (attrs.expr)
e7dfe4bb 2299 {
f12144dd 2300 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2301 {
f12144dd
RS
2302 tree field = TREE_OPERAND (attrs.expr, 1);
2303 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2304
2305 if (! DECL_SIZE_UNIT (field))
2306 {
f12144dd 2307 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2308 break;
2309 }
2310
2311 /* Is the field at least as large as the access? If so, ok,
2312 otherwise strip back to the containing structure. */
03667700
RK
2313 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2314 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2315 && attrs.offset >= 0)
e7dfe4bb
RH
2316 break;
2317
44de5aeb 2318 if (! host_integerp (offset, 1))
e7dfe4bb 2319 {
f12144dd 2320 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2321 break;
2322 }
2323
f12144dd 2324 attrs.expr = TREE_OPERAND (attrs.expr, 0);
754c3d5d
RS
2325 attrs.offset += tree_low_cst (offset, 1);
2326 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2327 / BITS_PER_UNIT);
e7dfe4bb
RH
2328 }
2329 /* Similarly for the decl. */
f12144dd
RS
2330 else if (DECL_P (attrs.expr)
2331 && DECL_SIZE_UNIT (attrs.expr)
2332 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2333 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2334 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2335 break;
2336 else
2337 {
2338 /* The widened memory access overflows the expression, which means
2339 that it could alias another expression. Zap it. */
f12144dd 2340 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2341 break;
2342 }
2343 }
2344
f12144dd 2345 if (! attrs.expr)
754c3d5d 2346 attrs.offset_known_p = false;
e7dfe4bb
RH
2347
2348 /* The widened memory may alias other stuff, so zap the alias set. */
2349 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2350 attrs.alias = 0;
754c3d5d
RS
2351 attrs.size_known_p = true;
2352 attrs.size = size;
f12144dd 2353 set_mem_attrs (new_rtx, &attrs);
60564289 2354 return new_rtx;
e7dfe4bb 2355}
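
/* Usage sketch (illustrative; BYTE_MEM is a placeholder): a target that
   cannot issue QImode loads can widen a byte access to a full word and
   mask out the byte:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The alias set is always zapped, and MEM_EXPR is dropped whenever the
   widened access might step outside the underlying object.  */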
23b2ce53 2356\f
f6129d66
RH
2357/* A fake decl that is used as the MEM_EXPR of spill slots. */
2358static GTY(()) tree spill_slot_decl;
2359
3d7e23f6
RH
2360tree
2361get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2362{
2363 tree d = spill_slot_decl;
2364 rtx rd;
f12144dd 2365 struct mem_attrs attrs;
f6129d66 2366
3d7e23f6 2367 if (d || !force_build_p)
f6129d66
RH
2368 return d;
2369
c2255bc4
AH
2370 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2371 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2372 DECL_ARTIFICIAL (d) = 1;
2373 DECL_IGNORED_P (d) = 1;
2374 TREE_USED (d) = 1;
f6129d66
RH
2375 spill_slot_decl = d;
2376
2377 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2378 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2379 attrs = *mode_mem_attrs[(int) BLKmode];
2380 attrs.alias = new_alias_set ();
2381 attrs.expr = d;
2382 set_mem_attrs (rd, &attrs);
f6129d66
RH
2383 SET_DECL_RTL (d, rd);
2384
2385 return d;
2386}
2387
2388/* Given MEM, a result from assign_stack_local, fill in the memory
2389 attributes as appropriate for a register allocator spill slot.
2390 These slots are not aliasable by other memory. We arrange for
2391 them all to use a single MEM_EXPR, so that the aliasing code can
2392 work properly in the case of shared spill slots. */
2393
2394void
2395set_mem_attrs_for_spill (rtx mem)
2396{
f12144dd
RS
2397 struct mem_attrs attrs;
2398 rtx addr;
f6129d66 2399
f12144dd
RS
2400 attrs = *get_mem_attrs (mem);
2401 attrs.expr = get_spill_slot_decl (true);
2402 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2403 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2404
2405 /* We expect the incoming memory to be of the form:
2406 (mem:MODE (plus (reg sfp) (const_int offset)))
2407 with perhaps the plus missing for offset = 0. */
2408 addr = XEXP (mem, 0);
754c3d5d
RS
2409 attrs.offset_known_p = true;
2410 attrs.offset = 0;
f6129d66 2411 if (GET_CODE (addr) == PLUS
481683e1 2412 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2413 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2414
f12144dd 2415 set_mem_attrs (mem, &attrs);
f6129d66
RH
2416 MEM_NOTRAP_P (mem) = 1;
2417}
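
/* Usage sketch (illustrative; assign_stack_local is defined elsewhere):
   after obtaining a stack slot for a spilled pseudo, the register
   allocator marks it so that all spill slots share one MEM_EXPR:

     rtx slot = assign_stack_local (mode, size, align);
     set_mem_attrs_for_spill (slot);  */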
2418\f
23b2ce53
RS
2419/* Return a newly created CODE_LABEL rtx with a unique label number. */
2420
2421rtx
502b8322 2422gen_label_rtx (void)
23b2ce53 2423{
0dc36574 2424 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2425 NULL, label_num++, NULL);
23b2ce53
RS
2426}
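
/* Usage sketch (illustrative; emit_label is defined elsewhere):

     rtx label = gen_label_rtx ();
     ...
     emit_label (label);

   The CODE_LABEL is not part of any insn chain until emit_label
   places it.  */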
2427\f
2428/* For procedure integration. */
2429
23b2ce53 2430/* Install new pointers to the first and last insns in the chain.
86fe05e0 2431 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2432 Used for an inline-procedure after copying the insn chain. */
2433
2434void
502b8322 2435set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2436{
86fe05e0
RK
2437 rtx insn;
2438
5936d944
JH
2439 set_first_insn (first);
2440 set_last_insn (last);
86fe05e0
RK
2441 cur_insn_uid = 0;
2442
b5b8b0ac
AO
2443 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2444 {
2445 int debug_count = 0;
2446
2447 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2448 cur_debug_insn_uid = 0;
2449
2450 for (insn = first; insn; insn = NEXT_INSN (insn))
2451 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2452 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2453 else
2454 {
2455 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2456 if (DEBUG_INSN_P (insn))
2457 debug_count++;
2458 }
2459
2460 if (debug_count)
2461 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2462 else
2463 cur_debug_insn_uid++;
2464 }
2465 else
2466 for (insn = first; insn; insn = NEXT_INSN (insn))
2467 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2468
2469 cur_insn_uid++;
23b2ce53 2470}
23b2ce53 2471\f
750c9258 2472/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2473 structure. This routine should only be called once. */
23b2ce53 2474
fd743bc1 2475static void
b4aaa77b 2476unshare_all_rtl_1 (rtx insn)
23b2ce53 2477{
d1b81779 2478 /* Unshare just about everything else. */
2c07f13b 2479 unshare_all_rtl_in_chain (insn);
750c9258 2480
23b2ce53
RS
2481 /* Make sure the addresses of stack slots found outside the insn chain
2482 (such as, in DECL_RTL of a variable) are not shared
2483 with the insn chain.
2484
2485 This special care is necessary when the stack slot MEM does not
2486 actually appear in the insn chain. If it does appear, its address
2487 is unshared from all else at that point. */
242b0ce6 2488 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2489}
2490
750c9258 2491/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2492 structure, again. This is a fairly expensive thing to do so it
2493 should be done sparingly. */
2494
2495void
502b8322 2496unshare_all_rtl_again (rtx insn)
d1b81779
GK
2497{
2498 rtx p;
624c87aa
RE
2499 tree decl;
2500
d1b81779 2501 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2502 if (INSN_P (p))
d1b81779
GK
2503 {
2504 reset_used_flags (PATTERN (p));
2505 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2506 if (CALL_P (p))
2507 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2508 }
624c87aa 2509
2d4aecb3 2510 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2511 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2512
624c87aa 2513 /* Make sure that virtual parameters are not shared. */
910ad8de 2514 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2515 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2516
2517 reset_used_flags (stack_slot_list);
2518
b4aaa77b 2519 unshare_all_rtl_1 (insn);
fd743bc1
PB
2520}
2521
c2924966 2522unsigned int
fd743bc1
PB
2523unshare_all_rtl (void)
2524{
b4aaa77b 2525 unshare_all_rtl_1 (get_insns ());
c2924966 2526 return 0;
d1b81779
GK
2527}
2528
ef330312 2529
2c07f13b
JH
2530/* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2531 Recursively does the same for subexpressions. */
2532
2533static void
2534verify_rtx_sharing (rtx orig, rtx insn)
2535{
2536 rtx x = orig;
2537 int i;
2538 enum rtx_code code;
2539 const char *format_ptr;
2540
2541 if (x == 0)
2542 return;
2543
2544 code = GET_CODE (x);
2545
2546 /* These types may be freely shared. */
2547
2548 switch (code)
2549 {
2550 case REG:
0ca5af51
AO
2551 case DEBUG_EXPR:
2552 case VALUE:
d8116890 2553 CASE_CONST_ANY:
2c07f13b
JH
2554 case SYMBOL_REF:
2555 case LABEL_REF:
2556 case CODE_LABEL:
2557 case PC:
2558 case CC0:
3810076b 2559 case RETURN:
26898771 2560 case SIMPLE_RETURN:
2c07f13b 2561 case SCRATCH:
2c07f13b 2562 return;
3e89ed8d
JH
2563 /* CLOBBERs of hard registers may validly be shared; CLOBBERs of pseudos may not. */
2564 case CLOBBER:
2565 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2566 return;
2567 break;
2c07f13b
JH
2568
2569 case CONST:
6fb5fa3c 2570 if (shared_const_p (orig))
2c07f13b
JH
2571 return;
2572 break;
2573
2574 case MEM:
2575 /* A MEM is allowed to be shared if its address is constant. */
2576 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2577 || reload_completed || reload_in_progress)
2578 return;
2579
2580 break;
2581
2582 default:
2583 break;
2584 }
2585
2586 /* This rtx may not be shared. If it has already been seen,
2587 report invalid sharing. */
1a2caa7a 2588#ifdef ENABLE_CHECKING
2c07f13b
JH
2589 if (RTX_FLAG (x, used))
2590 {
ab532386 2591 error ("invalid rtl sharing found in the insn");
2c07f13b 2592 debug_rtx (insn);
ab532386 2593 error ("shared rtx");
2c07f13b 2594 debug_rtx (x);
ab532386 2595 internal_error ("internal consistency failure");
2c07f13b 2596 }
1a2caa7a
NS
2597#endif
2598 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2599
2c07f13b
JH
2600 RTX_FLAG (x, used) = 1;
2601
6614fd40 2602 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2603
2604 format_ptr = GET_RTX_FORMAT (code);
2605
2606 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2607 {
2608 switch (*format_ptr++)
2609 {
2610 case 'e':
2611 verify_rtx_sharing (XEXP (x, i), insn);
2612 break;
2613
2614 case 'E':
2615 if (XVEC (x, i) != NULL)
2616 {
2617 int j;
2618 int len = XVECLEN (x, i);
2619
2620 for (j = 0; j < len; j++)
2621 {
1a2caa7a
NS
2622 /* We allow sharing of ASM_OPERANDS inside single
2623 instruction. */
2c07f13b 2624 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2625 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2626 == ASM_OPERANDS))
2c07f13b
JH
2627 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2628 else
2629 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2630 }
2631 }
2632 break;
2633 }
2634 }
2635 return;
2636}
2637
ba228239 2638/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2639 sharing in between the subexpressions. */
2640
24e47c76 2641DEBUG_FUNCTION void
2c07f13b
JH
2642verify_rtl_sharing (void)
2643{
2644 rtx p;
2645
a222c01a
MM
2646 timevar_push (TV_VERIFY_RTL_SHARING);
2647
2c07f13b
JH
2648 for (p = get_insns (); p; p = NEXT_INSN (p))
2649 if (INSN_P (p))
2650 {
2651 reset_used_flags (PATTERN (p));
2652 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2653 if (CALL_P (p))
2654 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2954a813
KK
2655 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2656 {
2657 int i;
2658 rtx q, sequence = PATTERN (p);
2659
2660 for (i = 0; i < XVECLEN (sequence, 0); i++)
2661 {
2662 q = XVECEXP (sequence, 0, i);
2663 gcc_assert (INSN_P (q));
2664 reset_used_flags (PATTERN (q));
2665 reset_used_flags (REG_NOTES (q));
776bebcd
JJ
2666 if (CALL_P (q))
2667 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2954a813
KK
2668 }
2669 }
2c07f13b
JH
2670 }
2671
2672 for (p = get_insns (); p; p = NEXT_INSN (p))
2673 if (INSN_P (p))
2674 {
2675 verify_rtx_sharing (PATTERN (p), p);
2676 verify_rtx_sharing (REG_NOTES (p), p);
776bebcd
JJ
2677 if (CALL_P (p))
2678 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2c07f13b 2679 }
a222c01a
MM
2680
2681 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2682}
2683
d1b81779
GK
2684/* Go through all the RTL insn bodies and copy any invalid shared structure.
2685 Assumes the mark bits are cleared at entry. */
2686
2c07f13b
JH
2687void
2688unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2689{
2690 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2691 if (INSN_P (insn))
d1b81779
GK
2692 {
2693 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2694 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2695 if (CALL_P (insn))
2696 CALL_INSN_FUNCTION_USAGE (insn)
2697 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2698 }
2699}
2700
2d4aecb3 2701/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2702 shared. We never replace the DECL_RTLs themselves with a copy,
2703 but expressions mentioned into a DECL_RTL cannot be shared with
2704 expressions in the instruction stream.
2705
2706 Note that reload may convert pseudo registers into memories in-place.
2707 Pseudo registers are always shared, but MEMs never are. Thus if we
2708 reset the used flags on MEMs in the instruction stream, we must set
2709 them again on MEMs that appear in DECL_RTLs. */
2710
2d4aecb3 2711static void
5eb2a9f2 2712set_used_decls (tree blk)
2d4aecb3
AO
2713{
2714 tree t;
2715
2716 /* Mark decls. */
910ad8de 2717 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2718 if (DECL_RTL_SET_P (t))
5eb2a9f2 2719 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2720
2721 /* Now process sub-blocks. */
87caf699 2722 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2723 set_used_decls (t);
2d4aecb3
AO
2724}
2725
23b2ce53 2726/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2727 Recursively does the same for subexpressions. Uses
2728 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2729
2730rtx
502b8322 2731copy_rtx_if_shared (rtx orig)
23b2ce53 2732{
32b32b16
AP
2733 copy_rtx_if_shared_1 (&orig);
2734 return orig;
2735}
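
/* Usage sketch (illustrative): the usual discipline, as in
   unshare_all_rtl_again above, is to clear the used bits over a region
   first and then copy whatever turns out to be shared:

     reset_used_flags (PATTERN (insn));
     reset_used_flags (REG_NOTES (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
     REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));  */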
2736
ff954f39
AP
2737/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2738 use. Recursively does the same for subexpressions. */
2739
32b32b16
AP
2740static void
2741copy_rtx_if_shared_1 (rtx *orig1)
2742{
2743 rtx x;
b3694847
SS
2744 int i;
2745 enum rtx_code code;
32b32b16 2746 rtx *last_ptr;
b3694847 2747 const char *format_ptr;
23b2ce53 2748 int copied = 0;
32b32b16
AP
2749 int length;
2750
2751 /* Repeat is used to turn tail-recursion into iteration. */
2752repeat:
2753 x = *orig1;
23b2ce53
RS
2754
2755 if (x == 0)
32b32b16 2756 return;
23b2ce53
RS
2757
2758 code = GET_CODE (x);
2759
2760 /* These types may be freely shared. */
2761
2762 switch (code)
2763 {
2764 case REG:
0ca5af51
AO
2765 case DEBUG_EXPR:
2766 case VALUE:
d8116890 2767 CASE_CONST_ANY:
23b2ce53 2768 case SYMBOL_REF:
2c07f13b 2769 case LABEL_REF:
23b2ce53
RS
2770 case CODE_LABEL:
2771 case PC:
2772 case CC0:
276e0224 2773 case RETURN:
26898771 2774 case SIMPLE_RETURN:
23b2ce53 2775 case SCRATCH:
0f41302f 2776 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 2777 return;
3e89ed8d
JH
2778 case CLOBBER:
2779 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2780 return;
2781 break;
23b2ce53 2782
b851ea09 2783 case CONST:
6fb5fa3c 2784 if (shared_const_p (x))
32b32b16 2785 return;
b851ea09
RK
2786 break;
2787
b5b8b0ac 2788 case DEBUG_INSN:
23b2ce53
RS
2789 case INSN:
2790 case JUMP_INSN:
2791 case CALL_INSN:
2792 case NOTE:
23b2ce53
RS
2793 case BARRIER:
2794 /* The chain of insns is not being copied. */
32b32b16 2795 return;
23b2ce53 2796
e9a25f70
JL
2797 default:
2798 break;
23b2ce53
RS
2799 }
2800
2801 /* This rtx may not be shared. If it has already been seen,
2802 replace it with a copy of itself. */
2803
2adc7f12 2804 if (RTX_FLAG (x, used))
23b2ce53 2805 {
aacd3885 2806 x = shallow_copy_rtx (x);
23b2ce53
RS
2807 copied = 1;
2808 }
2adc7f12 2809 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2810
2811 /* Now scan the subexpressions recursively.
2812 We can store any replaced subexpressions directly into X
2813 since we know X is not shared! Any vectors in X
2814 must be copied if X was copied. */
2815
2816 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2817 length = GET_RTX_LENGTH (code);
2818 last_ptr = NULL;
b8698a0f 2819
32b32b16 2820 for (i = 0; i < length; i++)
23b2ce53
RS
2821 {
2822 switch (*format_ptr++)
2823 {
2824 case 'e':
32b32b16
AP
2825 if (last_ptr)
2826 copy_rtx_if_shared_1 (last_ptr);
2827 last_ptr = &XEXP (x, i);
23b2ce53
RS
2828 break;
2829
2830 case 'E':
2831 if (XVEC (x, i) != NULL)
2832 {
b3694847 2833 int j;
f0722107 2834 int len = XVECLEN (x, i);
b8698a0f 2835
6614fd40
KH
2836 /* Copy the vector iff I copied the rtx and the length
2837 is nonzero. */
f0722107 2838 if (copied && len > 0)
8f985ec4 2839 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2840
5d3cc252 2841 /* Call recursively on all inside the vector. */
f0722107 2842 for (j = 0; j < len; j++)
32b32b16
AP
2843 {
2844 if (last_ptr)
2845 copy_rtx_if_shared_1 (last_ptr);
2846 last_ptr = &XVECEXP (x, i, j);
2847 }
23b2ce53
RS
2848 }
2849 break;
2850 }
2851 }
32b32b16
AP
2852 *orig1 = x;
2853 if (last_ptr)
2854 {
2855 orig1 = last_ptr;
2856 goto repeat;
2857 }
2858 return;
23b2ce53
RS
2859}
2860
76369a82 2861/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 2862
76369a82
NF
2863static void
2864mark_used_flags (rtx x, int flag)
23b2ce53 2865{
b3694847
SS
2866 int i, j;
2867 enum rtx_code code;
2868 const char *format_ptr;
32b32b16 2869 int length;
23b2ce53 2870
32b32b16
AP
2871 /* Repeat is used to turn tail-recursion into iteration. */
2872repeat:
23b2ce53
RS
2873 if (x == 0)
2874 return;
2875
2876 code = GET_CODE (x);
2877
9faa82d8 2878 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2879 for them. */
2880
2881 switch (code)
2882 {
2883 case REG:
0ca5af51
AO
2884 case DEBUG_EXPR:
2885 case VALUE:
d8116890 2886 CASE_CONST_ANY:
23b2ce53
RS
2887 case SYMBOL_REF:
2888 case CODE_LABEL:
2889 case PC:
2890 case CC0:
276e0224 2891 case RETURN:
26898771 2892 case SIMPLE_RETURN:
23b2ce53
RS
2893 return;
2894
b5b8b0ac 2895 case DEBUG_INSN:
23b2ce53
RS
2896 case INSN:
2897 case JUMP_INSN:
2898 case CALL_INSN:
2899 case NOTE:
2900 case LABEL_REF:
2901 case BARRIER:
2902 /* The chain of insns is not being copied. */
2903 return;
750c9258 2904
e9a25f70
JL
2905 default:
2906 break;
23b2ce53
RS
2907 }
2908
76369a82 2909 RTX_FLAG (x, used) = flag;
23b2ce53
RS
2910
2911 format_ptr = GET_RTX_FORMAT (code);
32b32b16 2912 length = GET_RTX_LENGTH (code);
b8698a0f 2913
32b32b16 2914 for (i = 0; i < length; i++)
23b2ce53
RS
2915 {
2916 switch (*format_ptr++)
2917 {
2918 case 'e':
32b32b16
AP
2919 if (i == length-1)
2920 {
2921 x = XEXP (x, i);
2922 goto repeat;
2923 }
76369a82 2924 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
2925 break;
2926
2927 case 'E':
2928 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 2929 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
2930 break;
2931 }
2932 }
2933}
2c07f13b 2934
76369a82 2935/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
2936 to look for shared sub-parts. */
2937
2938void
76369a82 2939reset_used_flags (rtx x)
2c07f13b 2940{
76369a82
NF
2941 mark_used_flags (x, 0);
2942}
2c07f13b 2943
76369a82
NF
2944/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2945 to look for shared sub-parts. */
2c07f13b 2946
76369a82
NF
2947void
2948set_used_flags (rtx x)
2949{
2950 mark_used_flags (x, 1);
2c07f13b 2951}
23b2ce53
RS
2952\f
2953/* Copy X if necessary so that it won't be altered by changes in OTHER.
2954 Return X or the rtx for the pseudo reg the value of X was copied into.
2955 OTHER must be valid as a SET_DEST. */
2956
2957rtx
502b8322 2958make_safe_from (rtx x, rtx other)
23b2ce53
RS
2959{
2960 while (1)
2961 switch (GET_CODE (other))
2962 {
2963 case SUBREG:
2964 other = SUBREG_REG (other);
2965 break;
2966 case STRICT_LOW_PART:
2967 case SIGN_EXTEND:
2968 case ZERO_EXTEND:
2969 other = XEXP (other, 0);
2970 break;
2971 default:
2972 goto done;
2973 }
2974 done:
3c0cb5de 2975 if ((MEM_P (other)
23b2ce53 2976 && ! CONSTANT_P (x)
f8cfc6aa 2977 && !REG_P (x)
23b2ce53 2978 && GET_CODE (x) != SUBREG)
f8cfc6aa 2979 || (REG_P (other)
23b2ce53
RS
2980 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2981 || reg_mentioned_p (other, x))))
2982 {
2983 rtx temp = gen_reg_rtx (GET_MODE (x));
2984 emit_move_insn (temp, x);
2985 return temp;
2986 }
2987 return x;
2988}
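
/* Usage sketch (illustrative; X and TARGET are placeholders): before
   emitting code that writes TARGET, protect a value that may overlap it:

     x = make_safe_from (x, target);

   Code emitted afterwards may clobber TARGET freely; X remains valid.  */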
2989\f
2990/* Emission of insns (adding them to the doubly-linked list). */
2991
23b2ce53
RS
2992/* Return the last insn emitted, even if it is in a sequence now pushed. */
2993
2994rtx
502b8322 2995get_last_insn_anywhere (void)
23b2ce53
RS
2996{
2997 struct sequence_stack *stack;
5936d944
JH
2998 if (get_last_insn ())
2999 return get_last_insn ();
49ad7cfa 3000 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
3001 if (stack->last != 0)
3002 return stack->last;
3003 return 0;
3004}
3005
2a496e8b
JDA
3006/* Return the first nonnote insn emitted in current sequence or current
3007 function. This routine looks inside SEQUENCEs. */
3008
3009rtx
502b8322 3010get_first_nonnote_insn (void)
2a496e8b 3011{
5936d944 3012 rtx insn = get_insns ();
91373fe8
JDA
3013
3014 if (insn)
3015 {
3016 if (NOTE_P (insn))
3017 for (insn = next_insn (insn);
3018 insn && NOTE_P (insn);
3019 insn = next_insn (insn))
3020 continue;
3021 else
3022 {
2ca202e7 3023 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
3024 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3025 insn = XVECEXP (PATTERN (insn), 0, 0);
3026 }
3027 }
2a496e8b
JDA
3028
3029 return insn;
3030}
3031
3032/* Return the last nonnote insn emitted in current sequence or current
3033 function. This routine looks inside SEQUENCEs. */
3034
3035rtx
502b8322 3036get_last_nonnote_insn (void)
2a496e8b 3037{
5936d944 3038 rtx insn = get_last_insn ();
91373fe8
JDA
3039
3040 if (insn)
3041 {
3042 if (NOTE_P (insn))
3043 for (insn = previous_insn (insn);
3044 insn && NOTE_P (insn);
3045 insn = previous_insn (insn))
3046 continue;
3047 else
3048 {
2ca202e7 3049 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
3050 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3051 insn = XVECEXP (PATTERN (insn), 0,
3052 XVECLEN (PATTERN (insn), 0) - 1);
3053 }
3054 }
2a496e8b
JDA
3055
3056 return insn;
3057}
3058
b5b8b0ac
AO
3059/* Return the number of actual (non-debug) insns emitted in this
3060 function. */
3061
3062int
3063get_max_insn_count (void)
3064{
3065 int n = cur_insn_uid;
3066
3067 /* The table size must be stable across -g, to avoid codegen
3068 differences due to debug insns, and not be affected by
3069 -fmin-insn-uid, to avoid excessive table size and to simplify
3070 debugging of -fcompare-debug failures. */
3071 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3072 n -= cur_debug_insn_uid;
3073 else
3074 n -= MIN_NONDEBUG_INSN_UID;
3075
3076 return n;
3077}
3078
23b2ce53
RS
3079\f
3080/* Return the next insn. If it is a SEQUENCE, return the first insn
3081 of the sequence. */
3082
3083rtx
502b8322 3084next_insn (rtx insn)
23b2ce53 3085{
75547801
KG
3086 if (insn)
3087 {
3088 insn = NEXT_INSN (insn);
3089 if (insn && NONJUMP_INSN_P (insn)
3090 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3091 insn = XVECEXP (PATTERN (insn), 0, 0);
3092 }
23b2ce53 3093
75547801 3094 return insn;
23b2ce53
RS
3095}
3096
3097/* Return the previous insn. If it is a SEQUENCE, return the last insn
3098 of the sequence. */
3099
3100rtx
502b8322 3101previous_insn (rtx insn)
23b2ce53 3102{
75547801
KG
3103 if (insn)
3104 {
3105 insn = PREV_INSN (insn);
3106 if (insn && NONJUMP_INSN_P (insn)
3107 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3108 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3109 }
23b2ce53 3110
75547801 3111 return insn;
23b2ce53
RS
3112}
3113
3114/* Return the next insn after INSN that is not a NOTE. This routine does not
3115 look inside SEQUENCEs. */
3116
3117rtx
502b8322 3118next_nonnote_insn (rtx insn)
23b2ce53 3119{
75547801
KG
3120 while (insn)
3121 {
3122 insn = NEXT_INSN (insn);
3123 if (insn == 0 || !NOTE_P (insn))
3124 break;
3125 }
23b2ce53 3126
75547801 3127 return insn;
23b2ce53
RS
3128}
3129
1e211590
DD
3130/* Return the next insn after INSN that is not a NOTE, but stop the
3131 search before we enter another basic block. This routine does not
3132 look inside SEQUENCEs. */
3133
3134rtx
3135next_nonnote_insn_bb (rtx insn)
3136{
3137 while (insn)
3138 {
3139 insn = NEXT_INSN (insn);
3140 if (insn == 0 || !NOTE_P (insn))
3141 break;
3142 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3143 return NULL_RTX;
3144 }
3145
3146 return insn;
3147}
3148
23b2ce53
RS
3149/* Return the previous insn before INSN that is not a NOTE. This routine does
3150 not look inside SEQUENCEs. */
3151
3152rtx
502b8322 3153prev_nonnote_insn (rtx insn)
23b2ce53 3154{
75547801
KG
3155 while (insn)
3156 {
3157 insn = PREV_INSN (insn);
3158 if (insn == 0 || !NOTE_P (insn))
3159 break;
3160 }
23b2ce53 3161
75547801 3162 return insn;
23b2ce53
RS
3163}
3164
896aa4ea
DD
3165/* Return the previous insn before INSN that is not a NOTE, but stop
3166 the search before we enter another basic block. This routine does
3167 not look inside SEQUENCEs. */
3168
3169rtx
3170prev_nonnote_insn_bb (rtx insn)
3171{
3172 while (insn)
3173 {
3174 insn = PREV_INSN (insn);
3175 if (insn == 0 || !NOTE_P (insn))
3176 break;
3177 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3178 return NULL_RTX;
3179 }
3180
3181 return insn;
3182}
3183
b5b8b0ac
AO
3184/* Return the next insn after INSN that is not a DEBUG_INSN. This
3185 routine does not look inside SEQUENCEs. */
3186
3187rtx
3188next_nondebug_insn (rtx insn)
3189{
3190 while (insn)
3191 {
3192 insn = NEXT_INSN (insn);
3193 if (insn == 0 || !DEBUG_INSN_P (insn))
3194 break;
3195 }
3196
3197 return insn;
3198}
3199
3200/* Return the previous insn before INSN that is not a DEBUG_INSN.
3201 This routine does not look inside SEQUENCEs. */
3202
3203rtx
3204prev_nondebug_insn (rtx insn)
3205{
3206 while (insn)
3207 {
3208 insn = PREV_INSN (insn);
3209 if (insn == 0 || !DEBUG_INSN_P (insn))
3210 break;
3211 }
3212
3213 return insn;
3214}
3215
f0fc0803
JJ
3216/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3217 This routine does not look inside SEQUENCEs. */
3218
3219rtx
3220next_nonnote_nondebug_insn (rtx insn)
3221{
3222 while (insn)
3223 {
3224 insn = NEXT_INSN (insn);
3225 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3226 break;
3227 }
3228
3229 return insn;
3230}
3231
3232/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3233 This routine does not look inside SEQUENCEs. */
3234
3235rtx
3236prev_nonnote_nondebug_insn (rtx insn)
3237{
3238 while (insn)
3239 {
3240 insn = PREV_INSN (insn);
3241 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3242 break;
3243 }
3244
3245 return insn;
3246}
3247
23b2ce53
RS
3248/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3249 or 0, if there is none. This routine does not look inside
0f41302f 3250 SEQUENCEs. */
23b2ce53
RS
3251
3252rtx
502b8322 3253next_real_insn (rtx insn)
23b2ce53 3254{
75547801
KG
3255 while (insn)
3256 {
3257 insn = NEXT_INSN (insn);
3258 if (insn == 0 || INSN_P (insn))
3259 break;
3260 }
23b2ce53 3261
75547801 3262 return insn;
23b2ce53
RS
3263}
3264
3265/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3266 or 0, if there is none. This routine does not look inside
3267 SEQUENCEs. */
3268
3269rtx
502b8322 3270prev_real_insn (rtx insn)
23b2ce53 3271{
75547801
KG
3272 while (insn)
3273 {
3274 insn = PREV_INSN (insn);
3275 if (insn == 0 || INSN_P (insn))
3276 break;
3277 }
23b2ce53 3278
75547801 3279 return insn;
23b2ce53
RS
3280}
3281
ee960939
OH
3282/* Return the last CALL_INSN in the current list, or 0 if there is none.
3283 This routine does not look inside SEQUENCEs. */
3284
3285rtx
502b8322 3286last_call_insn (void)
ee960939
OH
3287{
3288 rtx insn;
3289
3290 for (insn = get_last_insn ();
4b4bf941 3291 insn && !CALL_P (insn);
ee960939
OH
3292 insn = PREV_INSN (insn))
3293 ;
3294
3295 return insn;
3296}
3297
23b2ce53 3298/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3299 does not look inside SEQUENCEs. After reload this also skips over
3300 standalone USE and CLOBBER insns. */
23b2ce53 3301
69732dcb 3302int
4f588890 3303active_insn_p (const_rtx insn)
69732dcb 3304{
4b4bf941
JQ
3305 return (CALL_P (insn) || JUMP_P (insn)
3306 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3307 && (! reload_completed
3308 || (GET_CODE (PATTERN (insn)) != USE
3309 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3310}
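
/* Usage sketch (illustrative): walk only the insns that will produce
   code, skipping notes and, after reload, bare USE/CLOBBER insns:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_active_insn (insn))
       if (active_insn_p (insn))
         ... process insn ...  */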
3311
23b2ce53 3312rtx
502b8322 3313next_active_insn (rtx insn)
23b2ce53 3314{
75547801
KG
3315 while (insn)
3316 {
3317 insn = NEXT_INSN (insn);
3318 if (insn == 0 || active_insn_p (insn))
3319 break;
3320 }
23b2ce53 3321
75547801 3322 return insn;
23b2ce53
RS
3323}
3324
3325/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3326 does not look inside SEQUENCEs. After reload this also skips over
3327 standalone USE and CLOBBER insns. */
23b2ce53
RS
3328
3329rtx
502b8322 3330prev_active_insn (rtx insn)
23b2ce53 3331{
75547801
KG
3332 while (insn)
3333 {
3334 insn = PREV_INSN (insn);
3335 if (insn == 0 || active_insn_p (insn))
3336 break;
3337 }
23b2ce53 3338
75547801 3339 return insn;
23b2ce53
RS
3340}
3341
3342/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3343
3344rtx
502b8322 3345next_label (rtx insn)
23b2ce53 3346{
75547801
KG
3347 while (insn)
3348 {
3349 insn = NEXT_INSN (insn);
3350 if (insn == 0 || LABEL_P (insn))
3351 break;
3352 }
23b2ce53 3353
75547801 3354 return insn;
23b2ce53
RS
3355}
3356
dc0ff1c8
BS
3357/* Return the last label to mark the same position as LABEL. Return LABEL
3358 itself if it is null or any return rtx. */
6c2511d3
RS
3359
3360rtx
3361skip_consecutive_labels (rtx label)
3362{
3363 rtx insn;
3364
dc0ff1c8
BS
3365 if (label && ANY_RETURN_P (label))
3366 return label;
3367
6c2511d3
RS
3368 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3369 if (LABEL_P (insn))
3370 label = insn;
3371
3372 return label;
3373}
23b2ce53
RS
3374\f
3375#ifdef HAVE_cc0
c572e5ba
JVA
3376/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3377 and REG_CC_USER notes so we can find it. */
3378
3379void
502b8322 3380link_cc0_insns (rtx insn)
c572e5ba
JVA
3381{
3382 rtx user = next_nonnote_insn (insn);
3383
4b4bf941 3384 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
c572e5ba
JVA
3385 user = XVECEXP (PATTERN (user), 0, 0);
3386
65c5f2a6
ILT
3387 add_reg_note (user, REG_CC_SETTER, insn);
3388 add_reg_note (insn, REG_CC_USER, user);
c572e5ba
JVA
3389}
3390
23b2ce53
RS
3391/* Return the next insn that uses CC0 after INSN, which is assumed to
3392 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3393 applied to the result of this function should yield INSN).
3394
3395 Normally, this is simply the next insn. However, if a REG_CC_USER note
3396 is present, it contains the insn that uses CC0.
3397
3398 Return 0 if we can't find the insn. */
3399
3400rtx
502b8322 3401next_cc0_user (rtx insn)
23b2ce53 3402{
906c4e36 3403 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3404
3405 if (note)
3406 return XEXP (note, 0);
3407
3408 insn = next_nonnote_insn (insn);
4b4bf941 3409 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
23b2ce53
RS
3410 insn = XVECEXP (PATTERN (insn), 0, 0);
3411
2c3c49de 3412 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3413 return insn;
3414
3415 return 0;
3416}
3417
3418/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3419 note, it is the previous insn. */
3420
3421rtx
502b8322 3422prev_cc0_setter (rtx insn)
23b2ce53 3423{
906c4e36 3424 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3425
3426 if (note)
3427 return XEXP (note, 0);
3428
3429 insn = prev_nonnote_insn (insn);
5b0264cb 3430 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3431
3432 return insn;
3433}
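
/* A minimal sketch (hypothetical helper, meaningful only on a cc0
   target): next_cc0_user and prev_cc0_setter are inverses, which is
   what lets a delay-slot pass record a setter/user pair explicitly
   with link_cc0_insns.  */

static void
link_cc0_pair_example (rtx setter)
{
  rtx user = next_cc0_user (setter);

  if (user != 0)
    {
      /* Attach the REG_CC_SETTER and REG_CC_USER notes.  */
      link_cc0_insns (setter);
      gcc_assert (prev_cc0_setter (user) == setter);
    }
}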
3434#endif
e5bef2e4 3435
594f8779
RZ
3436#ifdef AUTO_INC_DEC
3437/* Find a RTX_AUTOINC class rtx which matches DATA. */
3438
3439static int
3440find_auto_inc (rtx *xp, void *data)
3441{
3442 rtx x = *xp;
5ead67f6 3443 rtx reg = (rtx) data;
594f8779
RZ
3444
3445 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3446 return 0;
3447
3448 switch (GET_CODE (x))
3449 {
3450 case PRE_DEC:
3451 case PRE_INC:
3452 case POST_DEC:
3453 case POST_INC:
3454 case PRE_MODIFY:
3455 case POST_MODIFY:
3456 if (rtx_equal_p (reg, XEXP (x, 0)))
3457 return 1;
3458 break;
3459
3460 default:
3461 gcc_unreachable ();
3462 }
3463 return -1;
3464}
3465#endif
3466
e5bef2e4
HB
3467/* Increment the label uses for all labels present in rtx. */
3468
3469static void
502b8322 3470mark_label_nuses (rtx x)
e5bef2e4 3471{
b3694847
SS
3472 enum rtx_code code;
3473 int i, j;
3474 const char *fmt;
e5bef2e4
HB
3475
3476 code = GET_CODE (x);
7537fc90 3477 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3478 LABEL_NUSES (XEXP (x, 0))++;
3479
3480 fmt = GET_RTX_FORMAT (code);
3481 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3482 {
3483 if (fmt[i] == 'e')
0fb7aeda 3484 mark_label_nuses (XEXP (x, i));
e5bef2e4 3485 else if (fmt[i] == 'E')
0fb7aeda 3486 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3487 mark_label_nuses (XVECEXP (x, i, j));
3488 }
3489}
3490
23b2ce53
RS
3491\f
3492/* Try splitting insns that can be split for better scheduling.
3493 PAT is the pattern which might be split.
3494 TRIAL is the insn providing PAT.
cc2902df 3495 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3496
3497 If this routine succeeds in splitting, it returns the first or last
11147ebe 3498 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3499 returns TRIAL. If the insn to be returned can be split, it will be. */
3500
3501rtx
502b8322 3502try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3503{
3504 rtx before = PREV_INSN (trial);
3505 rtx after = NEXT_INSN (trial);
23b2ce53 3506 int has_barrier = 0;
4a8cae83 3507 rtx note, seq, tem;
6b24c259 3508 int probability;
599aedd9
RH
3509 rtx insn_last, insn;
3510 int njumps = 0;
6b24c259 3511
cd9c1ca8
RH
3512 /* We're not good at redistributing frame information. */
3513 if (RTX_FRAME_RELATED_P (trial))
3514 return trial;
3515
6b24c259
JH
3516 if (any_condjump_p (trial)
3517 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3518 split_branch_probability = INTVAL (XEXP (note, 0));
3519 probability = split_branch_probability;
3520
3521 seq = split_insns (pat, trial);
3522
3523 split_branch_probability = -1;
23b2ce53
RS
3524
3525 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3526 We may need to handle this specially. */
4b4bf941 3527 if (after && BARRIER_P (after))
23b2ce53
RS
3528 {
3529 has_barrier = 1;
3530 after = NEXT_INSN (after);
3531 }
3532
599aedd9
RH
3533 if (!seq)
3534 return trial;
3535
3536 /* Avoid infinite loop if any insn of the result matches
3537 the original pattern. */
3538 insn_last = seq;
3539 while (1)
23b2ce53 3540 {
599aedd9
RH
3541 if (INSN_P (insn_last)
3542 && rtx_equal_p (PATTERN (insn_last), pat))
3543 return trial;
3544 if (!NEXT_INSN (insn_last))
3545 break;
3546 insn_last = NEXT_INSN (insn_last);
3547 }
750c9258 3548
6fb5fa3c
DB
3549 /* We will be adding the new sequence to the function. The splitters
3550 may have introduced invalid RTL sharing, so unshare the sequence now. */
3551 unshare_all_rtl_in_chain (seq);
3552
599aedd9
RH
3553 /* Mark labels. */
3554 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3555 {
4b4bf941 3556 if (JUMP_P (insn))
599aedd9
RH
3557 {
3558 mark_jump_label (PATTERN (insn), insn, 0);
3559 njumps++;
3560 if (probability != -1
3561 && any_condjump_p (insn)
3562 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3563 {
599aedd9
RH
3564 /* We can preserve the REG_BR_PROB notes only if exactly
3565 one jump is created, otherwise the machine description
3566 is responsible for this step using
3567 split_branch_probability variable. */
5b0264cb 3568 gcc_assert (njumps == 1);
65c5f2a6 3569 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
2f937369 3570 }
599aedd9
RH
3571 }
3572 }
3573
3574 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3575 in SEQ and copy any additional information across. */
4b4bf941 3576 if (CALL_P (trial))
599aedd9
RH
3577 {
3578 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3579 if (CALL_P (insn))
599aedd9 3580 {
65712d5c
RS
3581 rtx next, *p;
3582
3583 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3584 target may have explicitly specified. */
3585 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3586 while (*p)
3587 p = &XEXP (*p, 1);
3588 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3589
3590 /* If the old call was a sibling call, the new one must
3591 be too. */
599aedd9 3592 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3593
3594 /* If the new call is the last instruction in the sequence,
3595 it will effectively replace the old call in-situ. Otherwise
3596 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3597 so that it comes immediately after the new call. */
3598 if (NEXT_INSN (insn))
65f3dedb
RS
3599 for (next = NEXT_INSN (trial);
3600 next && NOTE_P (next);
3601 next = NEXT_INSN (next))
3602 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3603 {
3604 remove_insn (next);
3605 add_insn_after (next, insn, NULL);
65f3dedb 3606 break;
65712d5c 3607 }
599aedd9
RH
3608 }
3609 }
4b5e8abe 3610
599aedd9
RH
3611 /* Copy notes, particularly those related to the CFG. */
3612 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3613 {
3614 switch (REG_NOTE_KIND (note))
3615 {
3616 case REG_EH_REGION:
1d65f45c 3617 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3618 break;
216183ce 3619
599aedd9
RH
3620 case REG_NORETURN:
3621 case REG_SETJMP:
0a35513e 3622 case REG_TM:
594f8779 3623 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3624 {
4b4bf941 3625 if (CALL_P (insn))
65c5f2a6 3626 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3627 }
599aedd9 3628 break;
d6e95df8 3629
599aedd9 3630 case REG_NON_LOCAL_GOTO:
594f8779 3631 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3632 {
4b4bf941 3633 if (JUMP_P (insn))
65c5f2a6 3634 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3635 }
599aedd9 3636 break;
e5bef2e4 3637
594f8779
RZ
3638#ifdef AUTO_INC_DEC
3639 case REG_INC:
3640 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3641 {
3642 rtx reg = XEXP (note, 0);
3643 if (!FIND_REG_INC_NOTE (insn, reg)
3644 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
65c5f2a6 3645 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3646 }
3647 break;
3648#endif
3649
9a08d230
RH
3650 case REG_ARGS_SIZE:
3651 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3652 break;
3653
599aedd9
RH
3654 default:
3655 break;
23b2ce53 3656 }
599aedd9
RH
3657 }
3658
3659 /* If there are LABELS inside the split insns increment the
3660 usage count so we don't delete the label. */
cf7c4aa6 3661 if (INSN_P (trial))
599aedd9
RH
3662 {
3663 insn = insn_last;
3664 while (insn != NULL_RTX)
23b2ce53 3665 {
cf7c4aa6 3666 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3667 if (NONJUMP_INSN_P (insn))
599aedd9 3668 mark_label_nuses (PATTERN (insn));
23b2ce53 3669
599aedd9
RH
3670 insn = PREV_INSN (insn);
3671 }
23b2ce53
RS
3672 }
3673
5368224f 3674 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
599aedd9
RH
3675
3676 delete_insn (trial);
3677 if (has_barrier)
3678 emit_barrier_after (tem);
3679
3680 /* Recursively call try_split for each new insn created; by the
3681 time control returns here that insn will be fully split, so
3682 set LAST and continue from the insn after the one returned.
3683 We can't use next_active_insn here since AFTER may be a note.
3684 Ignore deleted insns, which can occur if not optimizing. */
3685 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3686 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3687 tem = try_split (PATTERN (tem), tem, 1);
3688
3689 /* Return either the first or the last insn, depending on which was
3690 requested. */
3691 return last
5936d944 3692 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3693 : NEXT_INSN (before);
23b2ce53
RS
3694}
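
/* A minimal sketch (hypothetical wrapper, not part of this file's
   interface): the usual way a split pass drives try_split.  LAST == 1
   asks for the final insn of the replacement sequence; an insn that
   cannot be split comes back unchanged.  */

static rtx
split_insn_example (rtx insn)
{
  return try_split (PATTERN (insn), insn, 1);
}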
3695\f
3696/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3697 Store PATTERN in the pattern slot. */
23b2ce53
RS
3698
3699rtx
502b8322 3700make_insn_raw (rtx pattern)
23b2ce53 3701{
b3694847 3702 rtx insn;
23b2ce53 3703
1f8f4a0b 3704 insn = rtx_alloc (INSN);
23b2ce53 3705
43127294 3706 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3707 PATTERN (insn) = pattern;
3708 INSN_CODE (insn) = -1;
1632afca 3709 REG_NOTES (insn) = NULL;
5368224f 3710 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3711 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3712
47984720
NC
3713#ifdef ENABLE_RTL_CHECKING
3714 if (insn
2c3c49de 3715 && INSN_P (insn)
47984720
NC
3716 && (returnjump_p (insn)
3717 || (GET_CODE (insn) == SET
3718 && SET_DEST (insn) == pc_rtx)))
3719 {
d4ee4d25 3720 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3721 debug_rtx (insn);
3722 }
3723#endif
750c9258 3724
23b2ce53
RS
3725 return insn;
3726}
3727
b5b8b0ac
AO
3728/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3729
e4da1e17 3730static rtx
b5b8b0ac
AO
3731make_debug_insn_raw (rtx pattern)
3732{
3733 rtx insn;
3734
3735 insn = rtx_alloc (DEBUG_INSN);
3736 INSN_UID (insn) = cur_debug_insn_uid++;
3737 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3738 INSN_UID (insn) = cur_insn_uid++;
3739
3740 PATTERN (insn) = pattern;
3741 INSN_CODE (insn) = -1;
3742 REG_NOTES (insn) = NULL;
5368224f 3743 INSN_LOCATION (insn) = curr_insn_location ();
b5b8b0ac
AO
3744 BLOCK_FOR_INSN (insn) = NULL;
3745
3746 return insn;
3747}
3748
2f937369 3749/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3750
e4da1e17 3751static rtx
502b8322 3752make_jump_insn_raw (rtx pattern)
23b2ce53 3753{
b3694847 3754 rtx insn;
23b2ce53 3755
4b1f5e8c 3756 insn = rtx_alloc (JUMP_INSN);
1632afca 3757 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3758
3759 PATTERN (insn) = pattern;
3760 INSN_CODE (insn) = -1;
1632afca
RS
3761 REG_NOTES (insn) = NULL;
3762 JUMP_LABEL (insn) = NULL;
5368224f 3763 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3764 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3765
3766 return insn;
3767}
aff507f4 3768
2f937369 3769/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3770
3771static rtx
502b8322 3772make_call_insn_raw (rtx pattern)
aff507f4 3773{
b3694847 3774 rtx insn;
aff507f4
RK
3775
3776 insn = rtx_alloc (CALL_INSN);
3777 INSN_UID (insn) = cur_insn_uid++;
3778
3779 PATTERN (insn) = pattern;
3780 INSN_CODE (insn) = -1;
aff507f4
RK
3781 REG_NOTES (insn) = NULL;
3782 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5368224f 3783 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3784 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3785
3786 return insn;
3787}
23b2ce53
RS
3788\f
3789/* Add INSN to the end of the doubly-linked list.
3790 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3791
3792void
502b8322 3793add_insn (rtx insn)
23b2ce53 3794{
5936d944 3795 PREV_INSN (insn) = get_last_insn();
23b2ce53
RS
3796 NEXT_INSN (insn) = 0;
3797
5936d944
JH
3798 if (NULL != get_last_insn())
3799 NEXT_INSN (get_last_insn ()) = insn;
23b2ce53 3800
5936d944
JH
3801 if (NULL == get_insns ())
3802 set_first_insn (insn);
23b2ce53 3803
5936d944 3804 set_last_insn (insn);
23b2ce53
RS
3805}
3806
a0ae8e8d
RK
3807/* Add INSN into the doubly-linked list after insn AFTER. This and
3808 the next should be the only functions called to insert an insn once
ba213285 3809 delay slots have been filled since only they know how to update a
a0ae8e8d 3810 SEQUENCE. */
23b2ce53
RS
3811
3812void
6fb5fa3c 3813add_insn_after (rtx insn, rtx after, basic_block bb)
23b2ce53
RS
3814{
3815 rtx next = NEXT_INSN (after);
3816
5b0264cb 3817 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3818
23b2ce53
RS
3819 NEXT_INSN (insn) = next;
3820 PREV_INSN (insn) = after;
3821
3822 if (next)
3823 {
3824 PREV_INSN (next) = insn;
4b4bf941 3825 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3826 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3827 }
5936d944
JH
3828 else if (get_last_insn () == after)
3829 set_last_insn (insn);
23b2ce53
RS
3830 else
3831 {
49ad7cfa 3832 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3833 /* Scan all pending sequences too. */
3834 for (; stack; stack = stack->next)
3835 if (after == stack->last)
fef0509b
RK
3836 {
3837 stack->last = insn;
3838 break;
3839 }
a0ae8e8d 3840
5b0264cb 3841 gcc_assert (stack);
23b2ce53
RS
3842 }
3843
4b4bf941
JQ
3844 if (!BARRIER_P (after)
3845 && !BARRIER_P (insn)
3c030e88
JH
3846 && (bb = BLOCK_FOR_INSN (after)))
3847 {
3848 set_block_for_insn (insn, bb);
38c1593d 3849 if (INSN_P (insn))
6fb5fa3c 3850 df_insn_rescan (insn);
3c030e88 3851 /* This should not happen, as the first insn in the BB is
a1f300c0 3852 always either a NOTE or a LABEL. */
a813c111 3853 if (BB_END (bb) == after
3c030e88 3854 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 3855 && !BARRIER_P (insn)
a38e7aa5 3856 && !NOTE_INSN_BASIC_BLOCK_P (insn))
a813c111 3857 BB_END (bb) = insn;
3c030e88
JH
3858 }
3859
23b2ce53 3860 NEXT_INSN (after) = insn;
4b4bf941 3861 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3862 {
3863 rtx sequence = PATTERN (after);
3864 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3865 }
3866}
3867
a0ae8e8d 3868/* Add INSN into the doubly-linked list before insn BEFORE. This and
6fb5fa3c
DB
3869 the previous should be the only functions called to insert an insn
3870 once delay slots have been filled since only they know how to
3871 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3872 bb from before. */
a0ae8e8d
RK
3873
3874void
6fb5fa3c 3875add_insn_before (rtx insn, rtx before, basic_block bb)
a0ae8e8d
RK
3876{
3877 rtx prev = PREV_INSN (before);
3878
5b0264cb 3879 gcc_assert (!optimize || !INSN_DELETED_P (before));
ba213285 3880
a0ae8e8d
RK
3881 PREV_INSN (insn) = prev;
3882 NEXT_INSN (insn) = before;
3883
3884 if (prev)
3885 {
3886 NEXT_INSN (prev) = insn;
4b4bf941 3887 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
a0ae8e8d
RK
3888 {
3889 rtx sequence = PATTERN (prev);
3890 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3891 }
3892 }
5936d944
JH
3893 else if (get_insns () == before)
3894 set_first_insn (insn);
a0ae8e8d
RK
3895 else
3896 {
49ad7cfa 3897 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3898 /* Scan all pending sequences too. */
3899 for (; stack; stack = stack->next)
3900 if (before == stack->first)
fef0509b
RK
3901 {
3902 stack->first = insn;
3903 break;
3904 }
a0ae8e8d 3905
5b0264cb 3906 gcc_assert (stack);
a0ae8e8d
RK
3907 }
3908
b8698a0f 3909 if (!bb
6fb5fa3c
DB
3910 && !BARRIER_P (before)
3911 && !BARRIER_P (insn))
3912 bb = BLOCK_FOR_INSN (before);
3913
3914 if (bb)
3c030e88
JH
3915 {
3916 set_block_for_insn (insn, bb);
38c1593d 3917 if (INSN_P (insn))
6fb5fa3c 3918 df_insn_rescan (insn);
5b0264cb 3919 /* This should not happen, as the first insn in the BB is always
43e05e45 3920 either a NOTE or a LABEL. */
5b0264cb
NS
3921 gcc_assert (BB_HEAD (bb) != insn
3922 /* Avoid clobbering of structure when creating new BB. */
3923 || BARRIER_P (insn)
a38e7aa5 3924 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88
JH
3925 }
3926
a0ae8e8d 3927 PREV_INSN (before) = insn;
4b4bf941 3928 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
a0ae8e8d
RK
3929 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3930}
3931
6fb5fa3c
DB
3932
3933/* Replace INSN with a deleted instruction note. */
3934
0ce2b299
EB
3935void
3936set_insn_deleted (rtx insn)
6fb5fa3c
DB
3937{
3938 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3939 PUT_CODE (insn, NOTE);
3940 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3941}
3942
3943
89e99eea
DB
3944/* Remove an insn from its doubly-linked list. This function knows how
3945 to handle sequences. */
3946void
502b8322 3947remove_insn (rtx insn)
89e99eea
DB
3948{
3949 rtx next = NEXT_INSN (insn);
3950 rtx prev = PREV_INSN (insn);
53c17031
JH
3951 basic_block bb;
3952
6fb5fa3c
DB
3953 /* Later in the code, the block will be marked dirty. */
3954 df_insn_delete (NULL, INSN_UID (insn));
3955
89e99eea
DB
3956 if (prev)
3957 {
3958 NEXT_INSN (prev) = next;
4b4bf941 3959 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea
DB
3960 {
3961 rtx sequence = PATTERN (prev);
3962 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3963 }
3964 }
5936d944
JH
3965 else if (get_insns () == insn)
3966 {
fb9ef4c1
JH
3967 if (next)
3968 PREV_INSN (next) = NULL;
5936d944
JH
3969 set_first_insn (next);
3970 }
89e99eea
DB
3971 else
3972 {
49ad7cfa 3973 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3974 /* Scan all pending sequences too. */
3975 for (; stack; stack = stack->next)
3976 if (insn == stack->first)
3977 {
3978 stack->first = next;
3979 break;
3980 }
3981
5b0264cb 3982 gcc_assert (stack);
89e99eea
DB
3983 }
3984
3985 if (next)
3986 {
3987 PREV_INSN (next) = prev;
4b4bf941 3988 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
89e99eea
DB
3989 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3990 }
5936d944
JH
3991 else if (get_last_insn () == insn)
3992 set_last_insn (prev);
89e99eea
DB
3993 else
3994 {
49ad7cfa 3995 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3996 /* Scan all pending sequences too. */
3997 for (; stack; stack = stack->next)
3998 if (insn == stack->last)
3999 {
4000 stack->last = prev;
4001 break;
4002 }
4003
5b0264cb 4004 gcc_assert (stack);
89e99eea 4005 }
4b4bf941 4006 if (!BARRIER_P (insn)
53c17031
JH
4007 && (bb = BLOCK_FOR_INSN (insn)))
4008 {
4e0084e4 4009 if (NONDEBUG_INSN_P (insn))
6fb5fa3c 4010 df_set_bb_dirty (bb);
a813c111 4011 if (BB_HEAD (bb) == insn)
53c17031 4012 {
3bf1e984
RK
4013 /* Never ever delete the basic block note without deleting the
4014 whole basic block. */
5b0264cb 4015 gcc_assert (!NOTE_P (insn));
a813c111 4016 BB_HEAD (bb) = next;
53c17031 4017 }
a813c111
SB
4018 if (BB_END (bb) == insn)
4019 BB_END (bb) = prev;
53c17031 4020 }
89e99eea
DB
4021}
4022
ee960939
OH
4023/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4024
4025void
502b8322 4026add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 4027{
5b0264cb 4028 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
4029
4030 /* Put the register usage information on the CALL. If there is already
4031 some usage information, put ours at the end. */
4032 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4033 {
4034 rtx link;
4035
4036 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4037 link = XEXP (link, 1))
4038 ;
4039
4040 XEXP (link, 1) = call_fusage;
4041 }
4042 else
4043 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4044}
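
/* A minimal sketch (hypothetical helper, not in the original file):
   recording that CALL_INSN uses REG, by appending a USE expression to
   its CALL_INSN_FUNCTION_USAGE list.  */

static void
add_reg_usage_example (rtx call_insn, rtx reg)
{
  rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_USE (VOIDmode, reg),
                                  NULL_RTX);
  add_function_usage_to (call_insn, fusage);
}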
4045
23b2ce53
RS
4046/* Delete all insns made since FROM.
4047 FROM becomes the new last instruction. */
4048
4049void
502b8322 4050delete_insns_since (rtx from)
23b2ce53
RS
4051{
4052 if (from == 0)
5936d944 4053 set_first_insn (0);
23b2ce53
RS
4054 else
4055 NEXT_INSN (from) = 0;
5936d944 4056 set_last_insn (from);
23b2ce53
RS
4057}
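
/* A minimal sketch (hypothetical helper, assuming PATTERN yields a
   single insn): the usual rollback idiom built on delete_insns_since.
   Remember the last insn, emit speculatively, and discard everything
   emitted since on failure.  */

static bool
emit_or_roll_back_example (rtx pattern)
{
  rtx last = get_last_insn ();
  rtx insn = emit_insn (pattern);

  if (recog_memoized (insn) < 0)
    {
      /* The new insn is not recognizable; drop it.  */
      delete_insns_since (last);
      return false;
    }
  return true;
}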
4058
5dab5552
MS
4059/* This function is deprecated; please use sequences instead.
4060
4061 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4062 The insns to be moved are those between FROM and TO.
4063 They are moved to a new position after the insn AFTER.
4064 AFTER must not be FROM or TO or any insn in between.
4065
4066 This function does not know about SEQUENCEs and hence should not be
4067 called after delay-slot filling has been done. */
4068
4069void
502b8322 4070reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53 4071{
4f8344eb
HPN
4072#ifdef ENABLE_CHECKING
4073 rtx x;
4074 for (x = from; x != to; x = NEXT_INSN (x))
4075 gcc_assert (after != x);
4076 gcc_assert (after != to);
4077#endif
4078
23b2ce53
RS
4079 /* Splice this bunch out of where it is now. */
4080 if (PREV_INSN (from))
4081 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4082 if (NEXT_INSN (to))
4083 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4084 if (get_last_insn () == to)
4085 set_last_insn (PREV_INSN (from));
4086 if (get_insns () == from)
4087 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4088
4089 /* Make the new neighbors point to it and it to them. */
4090 if (NEXT_INSN (after))
4091 PREV_INSN (NEXT_INSN (after)) = to;
4092
4093 NEXT_INSN (to) = NEXT_INSN (after);
4094 PREV_INSN (from) = after;
4095 NEXT_INSN (after) = from;
5936d944
JH
4096 if (after == get_last_insn())
4097 set_last_insn (to);
23b2ce53
RS
4098}
4099
3c030e88
JH
4100/* Same as function above, but take care to update BB boundaries. */
4101void
502b8322 4102reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
4103{
4104 rtx prev = PREV_INSN (from);
4105 basic_block bb, bb2;
4106
4107 reorder_insns_nobb (from, to, after);
4108
4b4bf941 4109 if (!BARRIER_P (after)
3c030e88
JH
4110 && (bb = BLOCK_FOR_INSN (after)))
4111 {
4112 rtx x;
6fb5fa3c 4113 df_set_bb_dirty (bb);
68252e27 4114
4b4bf941 4115 if (!BARRIER_P (from)
3c030e88
JH
4116 && (bb2 = BLOCK_FOR_INSN (from)))
4117 {
a813c111
SB
4118 if (BB_END (bb2) == to)
4119 BB_END (bb2) = prev;
6fb5fa3c 4120 df_set_bb_dirty (bb2);
3c030e88
JH
4121 }
4122
a813c111
SB
4123 if (BB_END (bb) == after)
4124 BB_END (bb) = to;
3c030e88
JH
4125
4126 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4127 if (!BARRIER_P (x))
63642d5a 4128 df_insn_change_bb (x, bb);
3c030e88
JH
4129 }
4130}
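
/* A minimal sketch (hypothetical helper, not in the original file):
   moving a single insn is the degenerate FROM == TO case of
   reorder_insns.  */

static void
move_insn_after_example (rtx insn, rtx after)
{
  reorder_insns (insn, insn, after);
}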
4131
23b2ce53 4132\f
2f937369
DM
4133/* Emit insn(s) of given code and pattern
4134 at a specified place within the doubly-linked list.
23b2ce53 4135
2f937369
DM
4136 All of the emit_foo global entry points accept an object
4137 X which is either an insn list or a PATTERN of a single
4138 instruction.
23b2ce53 4139
2f937369
DM
4140 There are thus a few canonical ways to generate code and
4141 emit it at a specific place in the instruction stream. For
4142 example, consider the instruction named SPOT and the fact that
4143 we would like to emit some instructions before SPOT. We might
4144 do it like this:
23b2ce53 4145
2f937369
DM
4146 start_sequence ();
4147 ... emit the new instructions ...
4148 insns_head = get_insns ();
4149 end_sequence ();
23b2ce53 4150
2f937369 4151 emit_insn_before (insns_head, SPOT);
23b2ce53 4152
2f937369
DM
4153 It used to be common to generate SEQUENCE rtl instead, but that
4154 is a relic of the past which no longer occurs. The reason is that
4155 SEQUENCE rtl results in heavily fragmented RTL memory since the SEQUENCE
4156 generated would almost certainly die right after it was created. */
23b2ce53 4157
5f02387d
NF
4158static rtx
4159emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4160 rtx (*make_raw) (rtx))
23b2ce53 4161{
b3694847 4162 rtx insn;
23b2ce53 4163
5b0264cb 4164 gcc_assert (before);
2f937369
DM
4165
4166 if (x == NULL_RTX)
4167 return last;
4168
4169 switch (GET_CODE (x))
23b2ce53 4170 {
b5b8b0ac 4171 case DEBUG_INSN:
2f937369
DM
4172 case INSN:
4173 case JUMP_INSN:
4174 case CALL_INSN:
4175 case CODE_LABEL:
4176 case BARRIER:
4177 case NOTE:
4178 insn = x;
4179 while (insn)
4180 {
4181 rtx next = NEXT_INSN (insn);
6fb5fa3c 4182 add_insn_before (insn, before, bb);
2f937369
DM
4183 last = insn;
4184 insn = next;
4185 }
4186 break;
4187
4188#ifdef ENABLE_RTL_CHECKING
4189 case SEQUENCE:
5b0264cb 4190 gcc_unreachable ();
2f937369
DM
4191 break;
4192#endif
4193
4194 default:
5f02387d 4195 last = (*make_raw) (x);
6fb5fa3c 4196 add_insn_before (last, before, bb);
2f937369 4197 break;
23b2ce53
RS
4198 }
4199
2f937369 4200 return last;
23b2ce53
RS
4201}
4202
5f02387d
NF
4203/* Make X be output before the instruction BEFORE. */
4204
4205rtx
4206emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4207{
4208 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4209}
4210
2f937369 4211/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4212 and output it before the instruction BEFORE. */
4213
4214rtx
a7102479 4215emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 4216{
5f02387d
NF
4217 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4218 make_jump_insn_raw);
23b2ce53
RS
4219}
4220
2f937369 4221/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4222 and output it before the instruction BEFORE. */
4223
4224rtx
a7102479 4225emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4226{
5f02387d
NF
4227 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4228 make_call_insn_raw);
969d70ca
JH
4229}
4230
b5b8b0ac
AO
4231/* Make an instruction with body X and code DEBUG_INSN
4232 and output it before the instruction BEFORE. */
4233
4234rtx
4235emit_debug_insn_before_noloc (rtx x, rtx before)
4236{
5f02387d
NF
4237 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4238 make_debug_insn_raw);
b5b8b0ac
AO
4239}
4240
23b2ce53 4241/* Make an insn of code BARRIER
e881bb1b 4242 and output it before the insn BEFORE. */
23b2ce53
RS
4243
4244rtx
502b8322 4245emit_barrier_before (rtx before)
23b2ce53 4246{
b3694847 4247 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4248
4249 INSN_UID (insn) = cur_insn_uid++;
4250
6fb5fa3c 4251 add_insn_before (insn, before, NULL);
23b2ce53
RS
4252 return insn;
4253}
4254
e881bb1b
RH
4255/* Emit the label LABEL before the insn BEFORE. */
4256
4257rtx
502b8322 4258emit_label_before (rtx label, rtx before)
e881bb1b 4259{
468660d3
SB
4260 gcc_checking_assert (INSN_UID (label) == 0);
4261 INSN_UID (label) = cur_insn_uid++;
4262 add_insn_before (label, before, NULL);
e881bb1b
RH
4263 return label;
4264}
4265
23b2ce53
RS
4266/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4267
4268rtx
a38e7aa5 4269emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4270{
b3694847 4271 rtx note = rtx_alloc (NOTE);
23b2ce53 4272 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4273 NOTE_KIND (note) = subtype;
ba4f7968 4274 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4275 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4276
6fb5fa3c 4277 add_insn_before (note, before, NULL);
23b2ce53
RS
4278 return note;
4279}
4280\f
2f937369
DM
4281/* Helper for emit_insn_after, handles lists of instructions
4282 efficiently. */
23b2ce53 4283
2f937369 4284static rtx
6fb5fa3c 4285emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4286{
2f937369
DM
4287 rtx last;
4288 rtx after_after;
6fb5fa3c
DB
4289 if (!bb && !BARRIER_P (after))
4290 bb = BLOCK_FOR_INSN (after);
23b2ce53 4291
6fb5fa3c 4292 if (bb)
23b2ce53 4293 {
6fb5fa3c 4294 df_set_bb_dirty (bb);
2f937369 4295 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4296 if (!BARRIER_P (last))
6fb5fa3c
DB
4297 {
4298 set_block_for_insn (last, bb);
4299 df_insn_rescan (last);
4300 }
4b4bf941 4301 if (!BARRIER_P (last))
6fb5fa3c
DB
4302 {
4303 set_block_for_insn (last, bb);
4304 df_insn_rescan (last);
4305 }
a813c111
SB
4306 if (BB_END (bb) == after)
4307 BB_END (bb) = last;
23b2ce53
RS
4308 }
4309 else
2f937369
DM
4310 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4311 continue;
4312
4313 after_after = NEXT_INSN (after);
4314
4315 NEXT_INSN (after) = first;
4316 PREV_INSN (first) = after;
4317 NEXT_INSN (last) = after_after;
4318 if (after_after)
4319 PREV_INSN (after_after) = last;
4320
5936d944
JH
4321 if (after == get_last_insn())
4322 set_last_insn (last);
e855c69d 4323
2f937369
DM
4324 return last;
4325}
4326
5f02387d
NF
4327static rtx
4328emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4329 rtx (*make_raw) (rtx))
2f937369
DM
4330{
4331 rtx last = after;
4332
5b0264cb 4333 gcc_assert (after);
2f937369
DM
4334
4335 if (x == NULL_RTX)
4336 return last;
4337
4338 switch (GET_CODE (x))
23b2ce53 4339 {
b5b8b0ac 4340 case DEBUG_INSN:
2f937369
DM
4341 case INSN:
4342 case JUMP_INSN:
4343 case CALL_INSN:
4344 case CODE_LABEL:
4345 case BARRIER:
4346 case NOTE:
6fb5fa3c 4347 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4348 break;
4349
4350#ifdef ENABLE_RTL_CHECKING
4351 case SEQUENCE:
5b0264cb 4352 gcc_unreachable ();
2f937369
DM
4353 break;
4354#endif
4355
4356 default:
5f02387d 4357 last = (*make_raw) (x);
6fb5fa3c 4358 add_insn_after (last, after, bb);
2f937369 4359 break;
23b2ce53
RS
4360 }
4361
2f937369 4362 return last;
23b2ce53
RS
4363}
4364
5f02387d
NF
4365/* Make X be output after the insn AFTER and set the BB of insn. If
4366 BB is NULL, an attempt is made to infer the BB from AFTER. */
4367
4368rtx
4369emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4370{
4371 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4372}
4373
255680cf 4374
2f937369 4375/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4376 and output it after the insn AFTER. */
4377
4378rtx
a7102479 4379emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4380{
5f02387d 4381 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
2f937369
DM
4382}
4383
4384/* Make an instruction with body X and code CALL_INSN
4385 and output it after the instruction AFTER. */
4386
4387rtx
a7102479 4388emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4389{
5f02387d 4390 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4391}
4392
b5b8b0ac
AO
4393/* Make an instruction with body X and code DEBUG_INSN
4394 and output it after the instruction AFTER. */
4395
4396rtx
4397emit_debug_insn_after_noloc (rtx x, rtx after)
4398{
5f02387d 4399 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4400}
4401
23b2ce53
RS
4402/* Make an insn of code BARRIER
4403 and output it after the insn AFTER. */
4404
4405rtx
502b8322 4406emit_barrier_after (rtx after)
23b2ce53 4407{
b3694847 4408 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4409
4410 INSN_UID (insn) = cur_insn_uid++;
4411
6fb5fa3c 4412 add_insn_after (insn, after, NULL);
23b2ce53
RS
4413 return insn;
4414}
4415
4416/* Emit the label LABEL after the insn AFTER. */
4417
4418rtx
502b8322 4419emit_label_after (rtx label, rtx after)
23b2ce53 4420{
468660d3
SB
4421 gcc_checking_assert (INSN_UID (label) == 0);
4422 INSN_UID (label) = cur_insn_uid++;
4423 add_insn_after (label, after, NULL);
23b2ce53
RS
4424 return label;
4425}
4426
4427/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4428
4429rtx
a38e7aa5 4430emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4431{
b3694847 4432 rtx note = rtx_alloc (NOTE);
23b2ce53 4433 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4434 NOTE_KIND (note) = subtype;
ba4f7968 4435 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4436 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4437 add_insn_after (note, after, NULL);
23b2ce53
RS
4438 return note;
4439}
23b2ce53 4440\f
e8110d6f
NF
4441/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4442 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4443
4444static rtx
4445emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4446 rtx (*make_raw) (rtx))
0d682900 4447{
e8110d6f 4448 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4449
a7102479 4450 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4451 return last;
4452
2f937369
DM
4453 after = NEXT_INSN (after);
4454 while (1)
4455 {
5368224f
DC
4456 if (active_insn_p (after) && !INSN_LOCATION (after))
4457 INSN_LOCATION (after) = loc;
2f937369
DM
4458 if (after == last)
4459 break;
4460 after = NEXT_INSN (after);
4461 }
0d682900
JH
4462 return last;
4463}
4464
e8110d6f
NF
4465/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4466 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4467 any DEBUG_INSNs. */
4468
4469static rtx
4470emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4471 rtx (*make_raw) (rtx))
a7102479 4472{
b5b8b0ac
AO
4473 rtx prev = after;
4474
e8110d6f
NF
4475 if (skip_debug_insns)
4476 while (DEBUG_INSN_P (prev))
4477 prev = PREV_INSN (prev);
b5b8b0ac
AO
4478
4479 if (INSN_P (prev))
5368224f 4480 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
e8110d6f 4481 make_raw);
a7102479 4482 else
e8110d6f 4483 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4484}
4485
5368224f 4486/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
0d682900 4487rtx
e8110d6f 4488emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4489{
e8110d6f
NF
4490 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4491}
2f937369 4492
5368224f 4493/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
e8110d6f
NF
4494rtx
4495emit_insn_after (rtx pattern, rtx after)
4496{
4497 return emit_pattern_after (pattern, after, true, make_insn_raw);
4498}
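
/* A minimal sketch (hypothetical helper, not in the original file):
   because emit_insn_after propagates INSN_LOCATION from the insn it
   follows, a move emitted this way inherits AFTER's source location.  */

static rtx
emit_move_after_example (rtx dst, rtx src, rtx after)
{
  return emit_insn_after (gen_move_insn (dst, src), after);
}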
dd3adcf8 4499
5368224f 4500/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
e8110d6f
NF
4501rtx
4502emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4503{
4504 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4505}
4506
5368224f 4507/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
a7102479
JH
4508rtx
4509emit_jump_insn_after (rtx pattern, rtx after)
4510{
e8110d6f 4511 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4512}
4513
5368224f 4514/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
0d682900 4515rtx
502b8322 4516emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4517{
e8110d6f 4518 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4519}
4520
5368224f 4521/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
a7102479
JH
4522rtx
4523emit_call_insn_after (rtx pattern, rtx after)
4524{
e8110d6f 4525 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4526}
4527
5368224f 4528/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
b5b8b0ac
AO
4529rtx
4530emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4531{
e8110d6f 4532 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4533}
4534
5368224f 4535/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
b5b8b0ac
AO
4536rtx
4537emit_debug_insn_after (rtx pattern, rtx after)
4538{
e8110d6f 4539 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4540}
4541
e8110d6f
NF
4542/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4543 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4544 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4545 CALL_INSN, etc. */
4546
4547static rtx
4548emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4549 rtx (*make_raw) (rtx))
0d682900
JH
4550{
4551 rtx first = PREV_INSN (before);
e8110d6f
NF
4552 rtx last = emit_pattern_before_noloc (pattern, before,
4553 insnp ? before : NULL_RTX,
4554 NULL, make_raw);
a7102479
JH
4555
4556 if (pattern == NULL_RTX || !loc)
4557 return last;
4558
26cb3993
JH
4559 if (!first)
4560 first = get_insns ();
4561 else
4562 first = NEXT_INSN (first);
a7102479
JH
4563 while (1)
4564 {
5368224f
DC
4565 if (active_insn_p (first) && !INSN_LOCATION (first))
4566 INSN_LOCATION (first) = loc;
a7102479
JH
4567 if (first == last)
4568 break;
4569 first = NEXT_INSN (first);
4570 }
4571 return last;
4572}
4573
e8110d6f
NF
4574/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4575 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4576 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4577 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4578
4579static rtx
4580emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4581 bool insnp, rtx (*make_raw) (rtx))
a7102479 4582{
b5b8b0ac
AO
4583 rtx next = before;
4584
e8110d6f
NF
4585 if (skip_debug_insns)
4586 while (DEBUG_INSN_P (next))
4587 next = PREV_INSN (next);
b5b8b0ac
AO
4588
4589 if (INSN_P (next))
5368224f 4590 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
e8110d6f 4591 insnp, make_raw);
a7102479 4592 else
e8110d6f
NF
4593 return emit_pattern_before_noloc (pattern, before,
4594 insnp ? before : NULL_RTX,
4595 NULL, make_raw);
a7102479
JH
4596}
4597
5368224f 4598/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
a7102479 4599rtx
e8110d6f 4600emit_insn_before_setloc (rtx pattern, rtx before, int loc)
a7102479 4601{
e8110d6f
NF
4602 return emit_pattern_before_setloc (pattern, before, loc, true,
4603 make_insn_raw);
4604}
a7102479 4605
5368224f 4606/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
e8110d6f
NF
4607rtx
4608emit_insn_before (rtx pattern, rtx before)
4609{
4610 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4611}
a7102479 4612
5368224f 4613/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
e8110d6f
NF
4614rtx
4615emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4616{
4617 return emit_pattern_before_setloc (pattern, before, loc, false,
4618 make_jump_insn_raw);
a7102479
JH
4619}
4620
5368224f 4621/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
a7102479
JH
4622rtx
4623emit_jump_insn_before (rtx pattern, rtx before)
4624{
e8110d6f
NF
4625 return emit_pattern_before (pattern, before, true, false,
4626 make_jump_insn_raw);
a7102479
JH
4627}
4628
5368224f 4629/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
a7102479
JH
4630rtx
4631emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4632{
e8110d6f
NF
4633 return emit_pattern_before_setloc (pattern, before, loc, false,
4634 make_call_insn_raw);
0d682900 4635}
a7102479 4636
e8110d6f 4637/* Like emit_call_insn_before_noloc,
5368224f 4638 but set INSN_LOCATION according to BEFORE. */
a7102479
JH
4639rtx
4640emit_call_insn_before (rtx pattern, rtx before)
4641{
e8110d6f
NF
4642 return emit_pattern_before (pattern, before, true, false,
4643 make_call_insn_raw);
a7102479 4644}
b5b8b0ac 4645
5368224f 4646/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
b5b8b0ac
AO
4647rtx
4648emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4649{
e8110d6f
NF
4650 return emit_pattern_before_setloc (pattern, before, loc, false,
4651 make_debug_insn_raw);
b5b8b0ac
AO
4652}
4653
e8110d6f 4654/* Like emit_debug_insn_before_noloc,
5368224f 4655 but set INSN_LOCATION according to BEFORE. */
b5b8b0ac
AO
4656rtx
4657emit_debug_insn_before (rtx pattern, rtx before)
4658{
e8110d6f
NF
4659 return emit_pattern_before (pattern, before, false, false,
4660 make_debug_insn_raw);
b5b8b0ac 4661}
0d682900 4662\f
2f937369
DM
4663/* Take X and emit it at the end of the doubly-linked
4664 INSN list.
23b2ce53
RS
4665
4666 Returns the last insn emitted. */
4667
4668rtx
502b8322 4669emit_insn (rtx x)
23b2ce53 4670{
5936d944 4671 rtx last = get_last_insn();
2f937369 4672 rtx insn;
23b2ce53 4673
2f937369
DM
4674 if (x == NULL_RTX)
4675 return last;
23b2ce53 4676
2f937369
DM
4677 switch (GET_CODE (x))
4678 {
b5b8b0ac 4679 case DEBUG_INSN:
2f937369
DM
4680 case INSN:
4681 case JUMP_INSN:
4682 case CALL_INSN:
4683 case CODE_LABEL:
4684 case BARRIER:
4685 case NOTE:
4686 insn = x;
4687 while (insn)
23b2ce53 4688 {
2f937369 4689 rtx next = NEXT_INSN (insn);
23b2ce53 4690 add_insn (insn);
2f937369
DM
4691 last = insn;
4692 insn = next;
23b2ce53 4693 }
2f937369 4694 break;
23b2ce53 4695
2f937369
DM
4696#ifdef ENABLE_RTL_CHECKING
4697 case SEQUENCE:
5b0264cb 4698 gcc_unreachable ();
2f937369
DM
4699 break;
4700#endif
23b2ce53 4701
2f937369
DM
4702 default:
4703 last = make_insn_raw (x);
4704 add_insn (last);
4705 break;
23b2ce53
RS
4706 }
4707
4708 return last;
4709}
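
/* A minimal sketch (hypothetical helper, not in the original file):
   emitting a single SET at the end of the chain.  emit_insn wraps the
   bare pattern in an INSN via make_insn_raw.  */

static rtx
emit_set_example (rtx dst, rtx src)
{
  return emit_insn (gen_rtx_SET (VOIDmode, dst, src));
}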
4710
b5b8b0ac
AO
4711/* Make an insn of code DEBUG_INSN with pattern X
4712 and add it to the end of the doubly-linked list. */
4713
4714rtx
4715emit_debug_insn (rtx x)
4716{
5936d944 4717 rtx last = get_last_insn();
b5b8b0ac
AO
4718 rtx insn;
4719
4720 if (x == NULL_RTX)
4721 return last;
4722
4723 switch (GET_CODE (x))
4724 {
4725 case DEBUG_INSN:
4726 case INSN:
4727 case JUMP_INSN:
4728 case CALL_INSN:
4729 case CODE_LABEL:
4730 case BARRIER:
4731 case NOTE:
4732 insn = x;
4733 while (insn)
4734 {
4735 rtx next = NEXT_INSN (insn);
4736 add_insn (insn);
4737 last = insn;
4738 insn = next;
4739 }
4740 break;
4741
4742#ifdef ENABLE_RTL_CHECKING
4743 case SEQUENCE:
4744 gcc_unreachable ();
4745 break;
4746#endif
4747
4748 default:
4749 last = make_debug_insn_raw (x);
4750 add_insn (last);
4751 break;
4752 }
4753
4754 return last;
4755}
4756
2f937369
DM
4757/* Make an insn of code JUMP_INSN with pattern X
4758 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4759
4760rtx
502b8322 4761emit_jump_insn (rtx x)
23b2ce53 4762{
d950dee3 4763 rtx last = NULL_RTX, insn;
23b2ce53 4764
2f937369 4765 switch (GET_CODE (x))
23b2ce53 4766 {
b5b8b0ac 4767 case DEBUG_INSN:
2f937369
DM
4768 case INSN:
4769 case JUMP_INSN:
4770 case CALL_INSN:
4771 case CODE_LABEL:
4772 case BARRIER:
4773 case NOTE:
4774 insn = x;
4775 while (insn)
4776 {
4777 rtx next = NEXT_INSN (insn);
4778 add_insn (insn);
4779 last = insn;
4780 insn = next;
4781 }
4782 break;
e0a5c5eb 4783
2f937369
DM
4784#ifdef ENABLE_RTL_CHECKING
4785 case SEQUENCE:
5b0264cb 4786 gcc_unreachable ();
2f937369
DM
4787 break;
4788#endif
e0a5c5eb 4789
2f937369
DM
4790 default:
4791 last = make_jump_insn_raw (x);
4792 add_insn (last);
4793 break;
3c030e88 4794 }
e0a5c5eb
RS
4795
4796 return last;
4797}
4798
2f937369 4799/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4800 and add it to the end of the doubly-linked list. */
4801
4802rtx
502b8322 4803emit_call_insn (rtx x)
23b2ce53 4804{
2f937369
DM
4805 rtx insn;
4806
4807 switch (GET_CODE (x))
23b2ce53 4808 {
b5b8b0ac 4809 case DEBUG_INSN:
2f937369
DM
4810 case INSN:
4811 case JUMP_INSN:
4812 case CALL_INSN:
4813 case CODE_LABEL:
4814 case BARRIER:
4815 case NOTE:
4816 insn = emit_insn (x);
4817 break;
23b2ce53 4818
2f937369
DM
4819#ifdef ENABLE_RTL_CHECKING
4820 case SEQUENCE:
5b0264cb 4821 gcc_unreachable ();
2f937369
DM
4822 break;
4823#endif
23b2ce53 4824
2f937369
DM
4825 default:
4826 insn = make_call_insn_raw (x);
23b2ce53 4827 add_insn (insn);
2f937369 4828 break;
23b2ce53 4829 }
2f937369
DM
4830
4831 return insn;
23b2ce53
RS
4832}
4833
4834/* Add the label LABEL to the end of the doubly-linked list. */
4835
4836rtx
502b8322 4837emit_label (rtx label)
23b2ce53 4838{
468660d3
SB
4839 gcc_checking_assert (INSN_UID (label) == 0);
4840 INSN_UID (label) = cur_insn_uid++;
4841 add_insn (label);
23b2ce53
RS
4842 return label;
4843}
4844
4845/* Make an insn of code BARRIER
4846 and add it to the end of the doubly-linked list. */
4847
4848rtx
502b8322 4849emit_barrier (void)
23b2ce53 4850{
b3694847 4851 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4852 INSN_UID (barrier) = cur_insn_uid++;
4853 add_insn (barrier);
4854 return barrier;
4855}
4856
5f2fc772 4857/* Emit a copy of note ORIG. */
502b8322 4858
5f2fc772
NS
4859rtx
4860emit_note_copy (rtx orig)
4861{
4862 rtx note;
b8698a0f 4863
5f2fc772 4864 note = rtx_alloc (NOTE);
b8698a0f 4865
5f2fc772
NS
4866 INSN_UID (note) = cur_insn_uid++;
4867 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4868 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4869 BLOCK_FOR_INSN (note) = NULL;
4870 add_insn (note);
b8698a0f 4871
2e040219 4872 return note;
23b2ce53
RS
4873}
4874
2e040219
NS
4875/* Make an insn of code NOTE or type NOTE_NO
4876 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4877
4878rtx
a38e7aa5 4879emit_note (enum insn_note kind)
23b2ce53 4880{
b3694847 4881 rtx note;
23b2ce53 4882
23b2ce53
RS
4883 note = rtx_alloc (NOTE);
4884 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4885 NOTE_KIND (note) = kind;
dd107e66 4886 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4887 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4888 add_insn (note);
4889 return note;
4890}
4891
c41c1387
RS
4892/* Emit a clobber of lvalue X. */
4893
4894rtx
4895emit_clobber (rtx x)
4896{
4897 /* CONCATs should not appear in the insn stream. */
4898 if (GET_CODE (x) == CONCAT)
4899 {
4900 emit_clobber (XEXP (x, 0));
4901 return emit_clobber (XEXP (x, 1));
4902 }
4903 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4904}
4905
4906/* Return a sequence of insns to clobber lvalue X. */
4907
4908rtx
4909gen_clobber (rtx x)
4910{
4911 rtx seq;
4912
4913 start_sequence ();
4914 emit_clobber (x);
4915 seq = get_insns ();
4916 end_sequence ();
4917 return seq;
4918}
4919
4920/* Emit a use of rvalue X. */
4921
4922rtx
4923emit_use (rtx x)
4924{
4925 /* CONCATs should not appear in the insn stream. */
4926 if (GET_CODE (x) == CONCAT)
4927 {
4928 emit_use (XEXP (x, 0));
4929 return emit_use (XEXP (x, 1));
4930 }
4931 return emit_insn (gen_rtx_USE (VOIDmode, x));
4932}
4933
4934/* Return a sequence of insns to use rvalue X. */
4935
4936rtx
4937gen_use (rtx x)
4938{
4939 rtx seq;
4940
4941 start_sequence ();
4942 emit_use (x);
4943 seq = get_insns ();
4944 end_sequence ();
4945 return seq;
4946}
4947
87b47c85 4948/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4949 note of this type already exists, remove it first. */
87b47c85 4950
3d238248 4951rtx
502b8322 4952set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4953{
4954 rtx note = find_reg_note (insn, kind, NULL_RTX);
4955
52488da1
JW
4956 switch (kind)
4957 {
4958 case REG_EQUAL:
4959 case REG_EQUIV:
4960 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4961 has multiple sets (some callers assume single_set
4962 means the insn only has one set, when in fact it
4963 means the insn only has one * useful * set). */
4964 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4965 {
5b0264cb 4966 gcc_assert (!note);
52488da1
JW
4967 return NULL_RTX;
4968 }
4969
4970 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4971 It serves no useful purpose and breaks eliminate_regs. */
4972 if (GET_CODE (datum) == ASM_OPERANDS)
4973 return NULL_RTX;
6fb5fa3c
DB
4974
4975 if (note)
4976 {
4977 XEXP (note, 0) = datum;
4978 df_notes_rescan (insn);
4979 return note;
4980 }
52488da1
JW
4981 break;
4982
4983 default:
6fb5fa3c
DB
4984 if (note)
4985 {
4986 XEXP (note, 0) = datum;
4987 return note;
4988 }
52488da1
JW
4989 break;
4990 }
3d238248 4991
65c5f2a6 4992 add_reg_note (insn, kind, datum);
6fb5fa3c
DB
4993
4994 switch (kind)
3d238248 4995 {
6fb5fa3c
DB
4996 case REG_EQUAL:
4997 case REG_EQUIV:
4998 df_notes_rescan (insn);
4999 break;
5000 default:
5001 break;
3d238248 5002 }
87b47c85 5003
3d238248 5004 return REG_NOTES (insn);
87b47c85 5005}
7543f918
JR
5006
5007/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5008rtx
5009set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5010{
5011 rtx set = single_set (insn);
5012
5013 if (set && SET_DEST (set) == dst)
5014 return set_unique_reg_note (insn, kind, datum);
5015 return NULL_RTX;
5016}
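
/* A minimal sketch (hypothetical helper, not in the original file):
   the usual way a REG_EQUAL note is attached.  DATUM is copied so the
   note does not share structure with anything else.  */

static void
note_constant_result_example (rtx insn, rtx value)
{
  set_unique_reg_note (insn, REG_EQUAL, copy_rtx (value));
}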
23b2ce53
RS
5017\f
5018/* Return an indication of which type of insn should have X as a body.
5019 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5020
d78db459 5021static enum rtx_code
502b8322 5022classify_insn (rtx x)
23b2ce53 5023{
4b4bf941 5024 if (LABEL_P (x))
23b2ce53
RS
5025 return CODE_LABEL;
5026 if (GET_CODE (x) == CALL)
5027 return CALL_INSN;
26898771 5028 if (ANY_RETURN_P (x))
23b2ce53
RS
5029 return JUMP_INSN;
5030 if (GET_CODE (x) == SET)
5031 {
5032 if (SET_DEST (x) == pc_rtx)
5033 return JUMP_INSN;
5034 else if (GET_CODE (SET_SRC (x)) == CALL)
5035 return CALL_INSN;
5036 else
5037 return INSN;
5038 }
5039 if (GET_CODE (x) == PARALLEL)
5040 {
b3694847 5041 int j;
23b2ce53
RS
5042 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5043 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5044 return CALL_INSN;
5045 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5046 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5047 return JUMP_INSN;
5048 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5049 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5050 return CALL_INSN;
5051 }
5052 return INSN;
5053}
5054
5055/* Emit the rtl pattern X as an appropriate kind of insn.
5056 If X is a label, it is simply added into the insn chain. */
5057
5058rtx
502b8322 5059emit (rtx x)
23b2ce53
RS
5060{
5061 enum rtx_code code = classify_insn (x);
5062
5b0264cb 5063 switch (code)
23b2ce53 5064 {
5b0264cb
NS
5065 case CODE_LABEL:
5066 return emit_label (x);
5067 case INSN:
5068 return emit_insn (x);
5069 case JUMP_INSN:
5070 {
5071 rtx insn = emit_jump_insn (x);
5072 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5073 return emit_barrier ();
5074 return insn;
5075 }
5076 case CALL_INSN:
5077 return emit_call_insn (x);
b5b8b0ac
AO
5078 case DEBUG_INSN:
5079 return emit_debug_insn (x);
5b0264cb
NS
5080 default:
5081 gcc_unreachable ();
23b2ce53 5082 }
23b2ce53
RS
5083}
5084\f
e2500fed 5085/* Space for free sequence stack entries. */
1431042e 5086static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5087
4dfa0342
RH
5088/* Begin emitting insns to a sequence. If this sequence will contain
5089 something that might cause the compiler to pop arguments to function
5090 calls (because those pops have previously been deferred; see
5091 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5092 before calling this function. That will ensure that the deferred
5093 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5094
5095void
502b8322 5096start_sequence (void)
23b2ce53
RS
5097{
5098 struct sequence_stack *tem;
5099
e2500fed
GK
5100 if (free_sequence_stack != NULL)
5101 {
5102 tem = free_sequence_stack;
5103 free_sequence_stack = tem->next;
5104 }
5105 else
a9429e29 5106 tem = ggc_alloc_sequence_stack ();
23b2ce53 5107
49ad7cfa 5108 tem->next = seq_stack;
5936d944
JH
5109 tem->first = get_insns ();
5110 tem->last = get_last_insn ();
23b2ce53 5111
49ad7cfa 5112 seq_stack = tem;
23b2ce53 5113
5936d944
JH
5114 set_first_insn (0);
5115 set_last_insn (0);
23b2ce53
RS
5116}
5117
5c7a310f
MM
5118/* Set up the insn chain starting with FIRST as the current sequence,
5119 saving the previously current one. See the documentation for
5120 start_sequence for more information about how to use this function. */
23b2ce53
RS
5121
5122void
502b8322 5123push_to_sequence (rtx first)
23b2ce53
RS
5124{
5125 rtx last;
5126
5127 start_sequence ();
5128
e84a58ff
EB
5129 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5130 ;
23b2ce53 5131
5936d944
JH
5132 set_first_insn (first);
5133 set_last_insn (last);
23b2ce53
RS
5134}
5135
bb27eeda
SE
5136/* Like push_to_sequence, but take the last insn as an argument to avoid
5137 looping through the list. */
5138
5139void
5140push_to_sequence2 (rtx first, rtx last)
5141{
5142 start_sequence ();
5143
5936d944
JH
5144 set_first_insn (first);
5145 set_last_insn (last);
bb27eeda
SE
5146}
5147
f15ae3a1
TW
5148/* Set up the outer-level insn chain
5149 as the current sequence, saving the previously current one. */
5150
5151void
502b8322 5152push_topmost_sequence (void)
f15ae3a1 5153{
aefdd5ab 5154 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5155
5156 start_sequence ();
5157
49ad7cfa 5158 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5159 top = stack;
5160
5936d944
JH
5161 set_first_insn (top->first);
5162 set_last_insn (top->last);
f15ae3a1
TW
5163}
5164
5165/* After emitting to the outer-level insn chain, update the outer-level
5166 insn chain, and restore the previous saved state. */
5167
5168void
502b8322 5169pop_topmost_sequence (void)
f15ae3a1 5170{
aefdd5ab 5171 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5172
49ad7cfa 5173 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5174 top = stack;
5175
5936d944
JH
5176 top->first = get_insns ();
5177 top->last = get_last_insn ();
f15ae3a1
TW
5178
5179 end_sequence ();
5180}
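
/* A minimal sketch (hypothetical helper, not in the original file):
   bracketing code with push_topmost_sequence/pop_topmost_sequence
   gives access to the function's outermost insn chain even while some
   other sequence is being emitted.  */

static rtx
last_outer_insn_example (void)
{
  rtx last;

  push_topmost_sequence ();
  last = get_last_insn ();
  pop_topmost_sequence ();

  return last;
}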
5181
23b2ce53
RS
5182/* After emitting to a sequence, restore previous saved state.
5183
5c7a310f 5184 To get the contents of the sequence just made, you must call
2f937369 5185 `get_insns' *before* calling here.
5c7a310f
MM
5186
5187 If the compiler might have deferred popping arguments while
5188 generating this sequence, and this sequence will not be immediately
5189 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5190 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5191 pops are inserted into this sequence, and not into some random
5192 location in the instruction stream. See INHIBIT_DEFER_POP for more
5193 information about deferred popping of arguments. */
23b2ce53
RS
5194
5195void
502b8322 5196end_sequence (void)
23b2ce53 5197{
49ad7cfa 5198 struct sequence_stack *tem = seq_stack;
23b2ce53 5199
5936d944
JH
5200 set_first_insn (tem->first);
5201 set_last_insn (tem->last);
49ad7cfa 5202 seq_stack = tem->next;
23b2ce53 5203
e2500fed
GK
5204 memset (tem, 0, sizeof (*tem));
5205 tem->next = free_sequence_stack;
5206 free_sequence_stack = tem;
23b2ce53
RS
5207}
5208
5209/* Return 1 if currently emitting into a sequence. */
5210
5211int
502b8322 5212in_sequence_p (void)
23b2ce53 5213{
49ad7cfa 5214 return seq_stack != 0;
23b2ce53 5215}
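
/* A minimal sketch (hypothetical helper, not in the original file):
   the canonical pattern described above emit_pattern_before_noloc,
   written out.  Build the new insns in a temporary sequence, then
   splice them in before SPOT.  */

static void
emit_move_before_spot_example (rtx dst, rtx src, rtx spot)
{
  rtx seq;

  start_sequence ();
  emit_insn (gen_move_insn (dst, src));
  seq = get_insns ();
  end_sequence ();

  emit_insn_before (seq, spot);
}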
23b2ce53 5216\f
59ec66dc
MM
5217/* Put the various virtual registers into REGNO_REG_RTX. */
5218
2bbdec73 5219static void
bd60bab2 5220init_virtual_regs (void)
59ec66dc 5221{
bd60bab2
JH
5222 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5223 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5224 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5225 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5226 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5227 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5228 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5229}
5230
\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
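
/* Illustrative usage sketch, not part of the original file: duplicating an
   insn's pattern and emitting the copy after some insn AFTER, which is
   essentially what emit_copy_of_insn_after below does for ordinary INSNs.

     rtx dup = emit_insn_after (copy_insn (PATTERN (insn)), after);
*/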

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx
copy_delay_slot_insn (rtx insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = copy_rtx (insn);
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
   zero, one, or minus-one vector when all elements are equal to that
   constant.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
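
/* Illustrative usage sketch, not part of the original file: building a
   vector of all-zero elements yields the shared CONST0_RTX object rather
   than a fresh CONST_VECTOR.  V4SImode is assumed to exist on the target.

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v) == CONST0_RTX (V4SImode));
*/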

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      FCONST1(mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      FCONST1(mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
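
/* Illustrative usage sketch, not part of the original file: placing an
   exact duplicate of INSN immediately after the original, as basic block
   duplication code may do.

     rtx copy = emit_copy_of_insn_after (insn, insn);
*/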

static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
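
/* Illustrative usage sketch, not part of the original file: repeated
   requests for the same (mode, regno) pair return the cached rtx, so
   pointer equality holds between the two results.

     rtx a = gen_hard_reg_clobber (word_mode, 0);
     rtx b = gen_hard_reg_clobber (word_mode, 0);

     gcc_assert (a == b);
*/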

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   data structures are built lazily, only when insns at a given place are
   actually needed.  */
static location_t curr_location, last_location;

/* Allocate the insn location data structures.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
  last_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}
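
/* Illustrative usage sketch, not part of the original file: these
   accessors are typically used for diagnostic output, e.g.

     fprintf (dump_file, "%s:%d\n", insn_file (insn), insn_line (insn));
*/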

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
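
/* Illustrative usage sketch, not part of the original file: a target
   expander would typically bracket the atomic operation with barriers
   like this; gen_memory_barrier stands in for whatever barrier pattern
   the target provides, and atomic_op is a hypothetical insn.

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());
     emit_insn (atomic_op);
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());
*/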
\f
#include "gt-emit-rtl.h"