/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET, and
   insert it into the hash table if one identical to it is not already in
   the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

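/* Illustrative sketch (not from the original file) of the interning
   pattern that get_reg_attrs shares with the CONST_INT, CONST_DOUBLE and
   mem_attrs tables above: equal attribute structures are canonicalized
   to a single GC-allocated copy, so equality elsewhere reduces to
   pointer comparison.  Assuming DECL is some tree:

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);
     gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);

   The second assertion holds because fully-default attributes are
   represented by a null pointer rather than a table entry.  */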

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

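/* A small usage sketch (illustrative, not from the original file):
   values in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come from the
   preallocated const_int_rtx array and anything larger is interned in
   const_int_htab, so equal CONST_INTs are always the same object:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 5);
     rtx b = GEN_INT (5);
     gcc_assert (a == b && a == const_int_rtx[5 + MAX_SAVED_CONST_INT]);

   This sharing is what lets the rest of the compiler compare CONST_INTs
   with pointer equality.  */
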
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

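/* Example (illustrative): unlike a bare GEN_INT, gen_int_mode first
   truncates C to the width of MODE and sign-extends the result, yielding
   the canonical CONST_INT for that mode.  With an 8-bit QImode:

     gen_int_mode (0xff, QImode)   == constm1_rtx
     gen_int_mode (0x100, QImode)  == const0_rtx

   whereas GEN_INT (0xff) would produce the CONST_INT 255, which is not a
   canonical QImode value.  */
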
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

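/* Worked instances of the three cases above (illustrative; they assume a
   32-bit HOST_WIDE_INT and a 32-bit SImode):

     immed_double_const (5, 0, SImode)
       -> gen_int_mode (5, SImode)	case 1, the mode fits in a HWI
     immed_double_const (-1, -1, VOIDmode)
       -> GEN_INT (-1)			case 2, i1 is all sign-bit copies
     immed_double_const (0, 1, VOIDmode)
       -> a VOIDmode CONST_DOUBLE with	case 3
	  CONST_DOUBLE_LOW == 0 and CONST_DOUBLE_HIGH == 1.  */
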
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

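/* Some concrete instances of the rules above (illustrative; sizes assume
   a 32-bit little-endian target with 4-byte UNITS_PER_WORD):

     (subreg:SI (reg:DI) 0)  -- valid, the low word
     (subreg:SI (reg:DI) 4)  -- valid, the high word
     (subreg:SI (reg:DI) 2)  -- invalid, offset not a multiple of osize
     (subreg:QI (reg:SI) 0)  -- valid, the lowpart byte (on big-endian the
				lowpart byte is at offset 3 instead)
     (subreg:QI (reg:SI) 1)  -- invalid, a subword subreg must be the
				lowpart of its containing word.  */
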
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
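
/* Worked example (illustrative): for the SImode lowpart of a DImode
   value, byte_lowpart_offset (SImode, DImode) is 0 on a little-endian
   target and 4 on a big-endian one.  In the paradoxical direction,
   byte_lowpart_offset (DImode, SImode) is 0 on little-endian and -4 on
   big-endian, the promised negative offset.  */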
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

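/* Example of the complex-mode case above (illustrative; pseudo numbers
   are made up): with generating_concat_p set,

     gen_reg_rtx (DCmode)  ->  (concat:DC (reg:DF 100) (reg:DF 101))

   two independent DFmode pseudos rather than one DCmode register, so the
   real and imaginary parts can be allocated separately.  */
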
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

1061
38ae7651
RS
1062/* Set the REG_ATTRS for registers in value X, given that X represents
1063 decl T. */
a560d4d4 1064
4e3825db 1065void
38ae7651
RS
1066set_reg_attrs_for_decl_rtl (tree t, rtx x)
1067{
1068 if (GET_CODE (x) == SUBREG)
fbe6ec81 1069 {
38ae7651
RS
1070 gcc_assert (subreg_lowpart_p (x));
1071 x = SUBREG_REG (x);
fbe6ec81 1072 }
f8cfc6aa 1073 if (REG_P (x))
38ae7651
RS
1074 REG_ATTRS (x)
1075 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
726612d2 1076 DECL_MODE (t)));
a560d4d4
JH
1077 if (GET_CODE (x) == CONCAT)
1078 {
1079 if (REG_P (XEXP (x, 0)))
1080 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1081 if (REG_P (XEXP (x, 1)))
1082 REG_ATTRS (XEXP (x, 1))
1083 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1084 }
1085 if (GET_CODE (x) == PARALLEL)
1086 {
d4afac5b
JZ
1087 int i, start;
1088
1089 /* Check for a NULL entry, used to indicate that the parameter goes
1090 both on the stack and in registers. */
1091 if (XEXP (XVECEXP (x, 0, 0), 0))
1092 start = 0;
1093 else
1094 start = 1;
1095
1096 for (i = start; i < XVECLEN (x, 0); i++)
a560d4d4
JH
1097 {
1098 rtx y = XVECEXP (x, 0, i);
1099 if (REG_P (XEXP (y, 0)))
1100 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1101 }
1102 }
1103}
1104
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
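
/* Two illustrative calls (pseudo register numbers made up):

     gen_lowpart_common (QImode, (sign_extend:SI (reg:QI 100)))
       -> (reg:QI 100), the object being extended itself;
     gen_lowpart_common (HImode, (sign_extend:SI (reg:QI 100)))
       -> (sign_extend:HI (reg:QI 100)), a narrower extension.  */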
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

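/* Worked example (illustrative, 4-byte UNITS_PER_WORD):
   subreg_lowpart_offset (QImode, DImode) is 0 on a little-endian target
   and 7 on a big-endian one (4 from the word part of the difference plus
   3 from the byte part), matching where the least significant byte of
   the DImode value sits in memory.  */
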
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

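/* Worked example (illustrative): the mirror image of the lowpart case.
   subreg_highpart_offset (SImode, DImode) is 4 on a little-endian target
   (the high word follows the low word) and 0 on a big-endian one (the
   high word comes first).  */
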
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

1443/* Similar to `operand_subword', but never return 0. If we can't
1444 extract the required subword, put OP into a register and try again.
1445 The second attempt must succeed. We always validate the address in
1446 this case.
23b2ce53
RS
1447
1448 MODE is the mode of OP, in case it is CONST_INT. */
1449
1450rtx
502b8322 1451operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1452{
ddef6bc7 1453 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1454
1455 if (result)
1456 return result;
1457
1458 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1459 {
1460 /* If this is a register which can not be accessed by words, copy it
1461 to a pseudo register. */
f8cfc6aa 1462 if (REG_P (op))
77e6b0eb
JC
1463 op = copy_to_reg (op);
1464 else
1465 op = force_reg (mode, op);
1466 }
23b2ce53 1467
ddef6bc7 1468 result = operand_subword (op, offset, 1, mode);
5b0264cb 1469 gcc_assert (result);
23b2ce53
RS
1470
1471 return result;
1472}
1473\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

6926c713 1574/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1575 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1576 if we are making a new object of this type. BITPOS is nonzero if
1577 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1578
1579void
502b8322
AJ
1580set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1581 HOST_WIDE_INT bitpos)
173b24b9 1582{
6f1087be 1583 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1584 tree type;
f12144dd 1585 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1586 addr_space_t as;
173b24b9
RK
1587
1588 /* It can happen that type_for_mode was given a mode for which there
1589 is no language-level type. In which case it returns NULL, which
1590 we can see here. */
1591 if (t == NULL_TREE)
1592 return;
1593
1594 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1595 if (type == error_mark_node)
1596 return;
173b24b9 1597
173b24b9
RK
1598 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1599 wrong answer, as it assumes that DECL_RTL already has the right alias
1600 info. Callers should not set DECL_RTL until after the call to
1601 set_mem_attributes. */
5b0264cb 1602 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1603
f12144dd
RS
1604 memset (&attrs, 0, sizeof (attrs));
1605
738cc472 1606 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1607 front-end routine) and use it. */
f12144dd 1608 attrs.alias = get_alias_set (t);
173b24b9 1609
a5e9c810 1610 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1611 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1612
268f7033 1613 /* Default values from pre-existing memory attributes if present. */
1614 refattrs = MEM_ATTRS (ref);
1615 if (refattrs)
1616 {
1617 /* ??? Can this ever happen? Calling this routine on a MEM that
1618 already carries memory attributes should probably be invalid. */
f12144dd 1619 attrs.expr = refattrs->expr;
754c3d5d 1620 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1621 attrs.offset = refattrs->offset;
754c3d5d 1622 attrs.size_known_p = refattrs->size_known_p;
1623 attrs.size = refattrs->size;
1624 attrs.align = refattrs->align;
1625 }
1626
1627 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1628 else
268f7033 1629 {
1630 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1631 gcc_assert (!defattrs->expr);
754c3d5d 1632 gcc_assert (!defattrs->offset_known_p);
f12144dd 1633
268f7033 1634 /* Respect mode size. */
754c3d5d 1635 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1636 attrs.size = defattrs->size;
1637 /* ??? Is this really necessary? We probably should always get
1638 the size from the type below. */
1639
1640 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1641 if T is an object, always compute the object alignment below. */
1642 if (TYPE_P (t))
1643 attrs.align = defattrs->align;
1644 else
1645 attrs.align = BITS_PER_UNIT;
1646 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1647 e.g. if the type carries an alignment attribute. Should we be
1648 able to simply always use TYPE_ALIGN? */
1649 }
1650
1651 /* We can set the alignment from the type if we are making an object,
1652 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
a80903ff 1653 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1654 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1655
738cc472 1656 /* If the size is known, we can set that. */
a787ccc3 1657 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1658
1659 /* The address-space is that of the type. */
1660 as = TYPE_ADDR_SPACE (type);
1661
1662 /* If T is not a type, we may be able to deduce some more information about
1663 the expression. */
1664 if (! TYPE_P (t))
8ac61af7 1665 {
8476af98 1666 tree base;
389fdba0 1667
1668 if (TREE_THIS_VOLATILE (t))
1669 MEM_VOLATILE_P (ref) = 1;
173b24b9 1670
1671 /* Now remove any conversions: they don't change what the underlying
1672 object is. Likewise for SAVE_EXPR. */
1043771b 1673 while (CONVERT_EXPR_P (t)
1674 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1675 || TREE_CODE (t) == SAVE_EXPR)
1676 t = TREE_OPERAND (t, 0);
1677
1678 /* Note whether this expression can trap. */
1679 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1680
1681 base = get_base_address (t);
1682 if (base)
1683 {
1684 if (DECL_P (base)
1685 && TREE_READONLY (base)
1686 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1687 && !TREE_THIS_VOLATILE (base))
1688 MEM_READONLY_P (ref) = 1;
1689
1690 /* Mark static const strings readonly as well. */
1691 if (TREE_CODE (base) == STRING_CST
1692 && TREE_READONLY (base)
1693 && TREE_STATIC (base))
1694 MEM_READONLY_P (ref) = 1;
1695
30b0317c 1696 /* Address-space information is on the base object. */
1697 if (TREE_CODE (base) == MEM_REF
1698 || TREE_CODE (base) == TARGET_MEM_REF)
1699 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1700 0))));
1701 else
1702 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1703 }
ba30e50d 1704
1705 /* If this expression uses its parent's alias set, mark it such
1706 that we won't change it. */
1707 if (component_uses_parent_alias_set (t))
1708 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1709
1710 /* If this is a decl, set the attributes of the MEM from it. */
1711 if (DECL_P (t))
1712 {
f12144dd 1713 attrs.expr = t;
1714 attrs.offset_known_p = true;
1715 attrs.offset = 0;
6f1087be 1716 apply_bitpos = bitpos;
a787ccc3 1717 new_size = DECL_SIZE_UNIT (t);
1718 }
1719
30b0317c 1720 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1721 else if (CONSTANT_CLASS_P (t))
30b0317c 1722 ;
998d7deb 1723
1724 /* If this is a field reference, record it. */
1725 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1726 {
f12144dd 1727 attrs.expr = t;
1728 attrs.offset_known_p = true;
1729 attrs.offset = 0;
6f1087be 1730 apply_bitpos = bitpos;
1731 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1732 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1733 }
1734
1735 /* If this is an array reference, look for an outer field reference. */
1736 else if (TREE_CODE (t) == ARRAY_REF)
1737 {
1738 tree off_tree = size_zero_node;
1739 /* We can't modify t, because we use it at the end of the
1740 function. */
1741 tree t2 = t;
1742
1743 do
1744 {
1b1838b6 1745 tree index = TREE_OPERAND (t2, 1);
1746 tree low_bound = array_ref_low_bound (t2);
1747 tree unit_size = array_ref_element_size (t2);
1748
1749 /* We assume all arrays have sizes that are a multiple of a byte.
1750 First subtract the lower bound, if any, in the type of the
1751 index, then convert to sizetype and multiply by the size of
1752 the array element. */
1753 if (! integer_zerop (low_bound))
1754 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1755 index, low_bound);
2567406a 1756
44de5aeb 1757 off_tree = size_binop (PLUS_EXPR,
1758 size_binop (MULT_EXPR,
1759 fold_convert (sizetype,
1760 index),
1761 unit_size),
1762 off_tree);
1b1838b6 1763 t2 = TREE_OPERAND (t2, 0);
998d7deb 1764 }
1b1838b6 1765 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1766
1767 if (DECL_P (t2)
1768 || TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1769 {
f12144dd 1770 attrs.expr = t2;
754c3d5d 1771 attrs.offset_known_p = false;
998d7deb 1772 if (host_integerp (off_tree, 1))
6f1087be 1773 {
1774 attrs.offset_known_p = true;
1775 attrs.offset = tree_low_cst (off_tree, 1);
1776 apply_bitpos = bitpos;
1777 }
998d7deb 1778 }
30b0317c 1779 /* Else do not record a MEM_EXPR. */
1780 }
1781
56c47f22 1782 /* If this is an indirect reference, record it. */
70f34814 1783 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1784 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1785 {
f12144dd 1786 attrs.expr = t;
1787 attrs.offset_known_p = true;
1788 attrs.offset = 0;
1789 apply_bitpos = bitpos;
1790 }
1791
1792 /* Compute the alignment. */
1793 unsigned int obj_align;
1794 unsigned HOST_WIDE_INT obj_bitpos;
1795 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1796 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1797 if (obj_bitpos != 0)
1798 obj_align = (obj_bitpos & -obj_bitpos);
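 /* Illustrative numbers (an assumption-free restatement of the two
    lines above): if the object is 64-bit aligned (obj_align == 64)
    but the access starts 16 bits past that boundary, only
    16 & -16 == 16 bits of alignment can be guaranteed.  */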
1799 attrs.align = MAX (attrs.align, obj_align);
1800 }
1801
1802 if (host_integerp (new_size, 1))
1803 {
1804 attrs.size_known_p = true;
1805 attrs.size = tree_low_cst (new_size, 1);
1806 }
1807
15c812e3 1808 /* If we modified OFFSET based on T, then subtract the outstanding
1809 bit position offset. Similarly, increase the size of the accessed
1810 object to contain the negative offset. */
6f1087be 1811 if (apply_bitpos)
8c317c5f 1812 {
1813 gcc_assert (attrs.offset_known_p);
1814 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1815 if (attrs.size_known_p)
1816 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1817 }
6f1087be 1818
8ac61af7 1819 /* Now set the attributes we computed above. */
f18a7b25 1820 attrs.addrspace = as;
f12144dd 1821 set_mem_attrs (ref, &attrs);
1822}
1823
6f1087be 1824void
502b8322 1825set_mem_attributes (rtx ref, tree t, int objectp)
1826{
1827 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1828}
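/* A minimal usage sketch (hypothetical helper, not part of the original
   file): after building a MEM for a declaration T at address ADDR, copy
   T's attributes onto it.  OBJECTP is nonzero because T is the object
   itself rather than merely its type.  */

static rtx
make_mem_for_decl_sketch (tree t, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (t)), addr);
  set_mem_attributes (mem, t, 1);
  return mem;
}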
1829
1830/* Set the alias set of MEM to SET. */
1831
1832void
4862826d 1833set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1834{
1835 struct mem_attrs attrs;
1836
173b24b9 1837 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1838 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1839 attrs = *get_mem_attrs (mem);
1840 attrs.alias = set;
1841 set_mem_attrs (mem, &attrs);
1842}
1843
1844/* Set the address space of MEM to ADDRSPACE (target-defined). */
1845
1846void
1847set_mem_addr_space (rtx mem, addr_space_t addrspace)
1848{
1849 struct mem_attrs attrs;
1850
1851 attrs = *get_mem_attrs (mem);
1852 attrs.addrspace = addrspace;
1853 set_mem_attrs (mem, &attrs);
173b24b9 1854}
738cc472 1855
d022d93e 1856/* Set the alignment of MEM to ALIGN bits. */
1857
1858void
502b8322 1859set_mem_align (rtx mem, unsigned int align)
738cc472 1860{
1861 struct mem_attrs attrs;
1862
1863 attrs = *get_mem_attrs (mem);
1864 attrs.align = align;
1865 set_mem_attrs (mem, &attrs);
738cc472 1866}
1285011e 1867
998d7deb 1868/* Set the expr for MEM to EXPR. */
1869
1870void
502b8322 1871set_mem_expr (rtx mem, tree expr)
1285011e 1872{
1873 struct mem_attrs attrs;
1874
1875 attrs = *get_mem_attrs (mem);
1876 attrs.expr = expr;
1877 set_mem_attrs (mem, &attrs);
1285011e 1878}
1879
1880/* Set the offset of MEM to OFFSET. */
1881
1882void
527210c4 1883set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 1884{
1885 struct mem_attrs attrs;
1886
1887 attrs = *get_mem_attrs (mem);
1888 attrs.offset_known_p = true;
1889 attrs.offset = offset;
1890 set_mem_attrs (mem, &attrs);
1891}
1892
1893/* Clear the offset of MEM. */
1894
1895void
1896clear_mem_offset (rtx mem)
1897{
1898 struct mem_attrs attrs;
1899
1900 attrs = *get_mem_attrs (mem);
754c3d5d 1901 attrs.offset_known_p = false;
f12144dd 1902 set_mem_attrs (mem, &attrs);
1903}
1904
1905/* Set the size of MEM to SIZE. */
1906
1907void
f5541398 1908set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 1909{
1910 struct mem_attrs attrs;
1911
1912 attrs = *get_mem_attrs (mem);
1913 attrs.size_known_p = true;
1914 attrs.size = size;
1915 set_mem_attrs (mem, &attrs);
1916}
1917
1918/* Clear the size of MEM. */
1919
1920void
1921clear_mem_size (rtx mem)
1922{
1923 struct mem_attrs attrs;
1924
1925 attrs = *get_mem_attrs (mem);
754c3d5d 1926 attrs.size_known_p = false;
f12144dd 1927 set_mem_attrs (mem, &attrs);
998d7deb 1928}
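/* Hedged sketch of the setters above (hypothetical helper, not part of
   the original file): give a fresh copy of MEM a known 4-byte size and
   32-bit alignment, and mark its offset as no longer known.  */

static rtx
annotate_word_mem_sketch (rtx mem)
{
  rtx copy = shallow_copy_rtx (mem);
  set_mem_align (copy, 32);	/* ALIGN is in bits.  */
  set_mem_size (copy, 4);	/* SIZE is in bytes.  */
  clear_mem_offset (copy);
  return copy;
}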
173b24b9 1929\f
1930/* Return a memory reference like MEMREF, but with its mode changed to MODE
1931 and its address changed to ADDR. (VOIDmode means don't change the mode.
1932 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1933 returned memory location is required to be valid. The memory
1934 attributes are not changed. */
23b2ce53 1935
738cc472 1936static rtx
502b8322 1937change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1938{
09e881c9 1939 addr_space_t as;
60564289 1940 rtx new_rtx;
23b2ce53 1941
5b0264cb 1942 gcc_assert (MEM_P (memref));
09e881c9 1943 as = MEM_ADDR_SPACE (memref);
1944 if (mode == VOIDmode)
1945 mode = GET_MODE (memref);
1946 if (addr == 0)
1947 addr = XEXP (memref, 0);
a74ff877 1948 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 1949 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 1950 return memref;
23b2ce53 1951
f1ec5147 1952 if (validate)
23b2ce53 1953 {
f1ec5147 1954 if (reload_in_progress || reload_completed)
09e881c9 1955 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 1956 else
09e881c9 1957 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 1958 }
750c9258 1959
1960 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1961 return memref;
1962
1963 new_rtx = gen_rtx_MEM (mode, addr);
1964 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1965 return new_rtx;
23b2ce53 1966}
792760b9 1967
1968/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1969 way we are changing MEMREF, so we only preserve the alias set. */
1970
1971rtx
502b8322 1972change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 1973{
f12144dd 1974 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
60564289 1975 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 1976 struct mem_attrs attrs, *defattrs;
4e44c1ef 1977
1978 attrs = *get_mem_attrs (memref);
1979 defattrs = mode_mem_attrs[(int) mmode];
1980 attrs.expr = NULL_TREE;
1981 attrs.offset_known_p = false;
1982 attrs.size_known_p = defattrs->size_known_p;
1983 attrs.size = defattrs->size;
1984 attrs.align = defattrs->align;
c2f7bcc3 1985
fdb1c7b3 1986 /* If there are no changes, just return the original memory reference. */
60564289 1987 if (new_rtx == memref)
4e44c1ef 1988 {
f12144dd 1989 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 1990 return new_rtx;
4e44c1ef 1991
1992 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1993 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 1994 }
fdb1c7b3 1995
f12144dd 1996 set_mem_attrs (new_rtx, &attrs);
60564289 1997 return new_rtx;
f4ef873c 1998}
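/* Usage sketch (hypothetical helper; HImode chosen only for
   illustration): view the same location in a different mode while
   keeping the address -- a NULL ADDR means "don't change the
   address".  */

static rtx
view_as_himode_sketch (rtx mem)
{
  return change_address (mem, HImode, NULL_RTX);
}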
792760b9 1999
2000/* Return a memory reference like MEMREF, but with its mode changed
2001 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2002 nonzero, the memory address is forced to be valid.
2003 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2004 and the caller is responsible for adjusting the MEMREF base register.
2005 If ADJUST_OBJECT is zero, the underlying object associated with the
2006 memory reference is left unchanged and the caller is responsible for
2007 dealing with it. Otherwise, if the new memory reference is outside
2008 the underlying object, even partially, then the object is dropped.
2009 SIZE, if nonzero, is the size of an access in cases where MODE
2010 has no inherent size. */
2011
2012rtx
502b8322 2013adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2014 int validate, int adjust_address, int adjust_object,
2015 HOST_WIDE_INT size)
f1ec5147 2016{
823e3574 2017 rtx addr = XEXP (memref, 0);
60564289 2018 rtx new_rtx;
f12144dd 2019 enum machine_mode address_mode;
a6fe9ed4 2020 int pbits;
0207fa90 2021 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2022 unsigned HOST_WIDE_INT max_align;
2023#ifdef POINTERS_EXTEND_UNSIGNED
2024 enum machine_mode pointer_mode
2025 = targetm.addr_space.pointer_mode (attrs.addrspace);
2026#endif
823e3574 2027
2028 /* VOIDmode means no mode change for change_address_1. */
2029 if (mode == VOIDmode)
2030 mode = GET_MODE (memref);
2031
2032 /* Take the size of non-BLKmode accesses from the mode. */
2033 defattrs = mode_mem_attrs[(int) mode];
2034 if (defattrs->size_known_p)
2035 size = defattrs->size;
2036
2037 /* If there are no changes, just return the original memory reference. */
2038 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2039 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2040 && (!validate || memory_address_addr_space_p (mode, addr,
2041 attrs.addrspace)))
2042 return memref;
2043
d14419e4 2044 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2045 This may happen even if offset is nonzero -- consider
2046 (plus (plus reg reg) const_int) -- so do this always. */
2047 addr = copy_rtx (addr);
2048
2049 /* Convert a possibly large offset to a signed value within the
2050 range of the target address space. */
372d6395 2051 address_mode = get_address_mode (memref);
d4ebfa65 2052 pbits = GET_MODE_BITSIZE (address_mode);
2053 if (HOST_BITS_PER_WIDE_INT > pbits)
2054 {
2055 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2056 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2057 >> shift);
2058 }
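 /* Worked example (illustrative): with a 64-bit HOST_WIDE_INT and a
    32-bit address space, shift == 32, so an offset of 0xfffffffc is
    sign-extended to -4, matching the target's address arithmetic.  */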
2059
5ef0b50d 2060 if (adjust_address)
2061 {
2062 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2063 object, we can merge it into the LO_SUM. */
2064 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2065 && offset >= 0
2066 && (unsigned HOST_WIDE_INT) offset
2067 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2068 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2069 plus_constant (address_mode,
2070 XEXP (addr, 1), offset));
2071#ifdef POINTERS_EXTEND_UNSIGNED
2072 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2073 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2074 the fact that pointers are not allowed to overflow. */
2075 else if (POINTERS_EXTEND_UNSIGNED > 0
2076 && GET_CODE (addr) == ZERO_EXTEND
2077 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2078 && trunc_int_for_mode (offset, pointer_mode) == offset)
2079 addr = gen_rtx_ZERO_EXTEND (address_mode,
2080 plus_constant (pointer_mode,
2081 XEXP (addr, 0), offset));
2082#endif
4a78c787 2083 else
0a81f074 2084 addr = plus_constant (address_mode, addr, offset);
4a78c787 2085 }
823e3574 2086
60564289 2087 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472 2088
2089 /* If the address is a REG, change_address_1 rightfully returns memref,
2090 but this would destroy memref's MEM_ATTRS. */
2091 if (new_rtx == memref && offset != 0)
2092 new_rtx = copy_rtx (new_rtx);
2093
2094 /* Conservatively drop the object if we don't know where we start from. */
2095 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2096 {
2097 attrs.expr = NULL_TREE;
2098 attrs.alias = 0;
2099 }
2100
2101 /* Compute the new values of the memory attributes due to this adjustment.
2102 We add the offsets and update the alignment. */
754c3d5d 2103 if (attrs.offset_known_p)
2104 {
2105 attrs.offset += offset;
2106
2107 /* Drop the object if the new left end is not within its bounds. */
2108 if (adjust_object && attrs.offset < 0)
2109 {
2110 attrs.expr = NULL_TREE;
2111 attrs.alias = 0;
2112 }
2113 }
738cc472 2114
2115 /* Compute the new alignment by taking the MIN of the alignment and the
2116 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2117 is zero. */
2118 if (offset != 0)
2119 {
2120 max_align = (offset & -offset) * BITS_PER_UNIT;
2121 attrs.align = MIN (attrs.align, max_align);
2122 }
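 /* Illustrative numbers: offset == 12 (binary 1100) gives
    offset & -offset == 4, i.e. at most 4-byte (32-bit) alignment can
    be promised for the adjusted address.  */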
738cc472 2123
5f2cbd0d 2124 if (size)
754c3d5d 2125 {
5ef0b50d 2126 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2127 if (adjust_object && (offset + size) > attrs.size)
2128 {
2129 attrs.expr = NULL_TREE;
2130 attrs.alias = 0;
2131 }
754c3d5d 2132 attrs.size_known_p = true;
5f2cbd0d 2133 attrs.size = size;
2134 }
2135 else if (attrs.size_known_p)
5ef0b50d 2136 {
5f2cbd0d 2137 gcc_assert (!adjust_object);
5ef0b50d 2138 attrs.size -= offset;
2139 /* ??? The store_by_pieces machinery generates negative sizes,
2140 so don't assert for that here. */
5ef0b50d 2141 }
10b76d73 2142
f12144dd 2143 set_mem_attrs (new_rtx, &attrs);
738cc472 2144
60564289 2145 return new_rtx;
2146}
2147
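/* Hedged usage sketch (hypothetical helper): callers normally go through
   the adjust_address/adjust_address_nv macros from rtl.h, which wrap
   adjust_address_1.  E.g. selecting the second SImode word of a DImode
   MEM; the offset of 4 bytes is illustrative only.  */

static rtx
second_word_sketch (rtx dimode_mem)
{
  return adjust_address (dimode_mem, SImode, 4);
}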
2148/* Return a memory reference like MEMREF, but with its mode changed
2149 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2150 MEMREF offset by OFFSET bytes. If VALIDATE is
2151 nonzero, the memory address is forced to be valid. */
2152
2153rtx
2154adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2155 HOST_WIDE_INT offset, int validate)
2156{
2157 memref = change_address_1 (memref, VOIDmode, addr, validate);
5f2cbd0d 2158 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2159}
2160
2161/* Return a memory reference like MEMREF, but whose address is changed by
2162 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2163 known to be in OFFSET (possibly 1). */
2164
2165rtx
502b8322 2166offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2167{
60564289 2168 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2169 enum machine_mode address_mode;
754c3d5d 2170 struct mem_attrs attrs, *defattrs;
e3c8ea67 2171
f12144dd 2172 attrs = *get_mem_attrs (memref);
372d6395 2173 address_mode = get_address_mode (memref);
d4ebfa65 2174 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2175
68252e27 2176 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2177 could have secondary memory references, multiplies or anything.
2178
2179 However, if we did go and rearrange things, we can wind up not
2180 being able to recognize the magic around pic_offset_table_rtx.
2181 This stuff is fragile, and is yet another example of why it is
2182 bad to expose PIC machinery too early. */
2183 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2184 attrs.addrspace)
2185 && GET_CODE (addr) == PLUS
2186 && XEXP (addr, 0) == pic_offset_table_rtx)
2187 {
2188 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2189 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2190 }
2191
2192 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2193 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2194
fdb1c7b3 2195 /* If there are no changes, just return the original memory reference. */
2196 if (new_rtx == memref)
2197 return new_rtx;
fdb1c7b3 2198
2199 /* Update the alignment to reflect the offset. Reset the offset, which
2200 we don't know. */
2201 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2202 attrs.offset_known_p = false;
2203 attrs.size_known_p = defattrs->size_known_p;
2204 attrs.size = defattrs->size;
2205 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2206 set_mem_attrs (new_rtx, &attrs);
60564289 2207 return new_rtx;
0d4903b8 2208}
68252e27 2209
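/* Usage sketch (hypothetical helper): index a MEM by a register while
   promising via POW2 that the index is a multiple of 4 bytes.  */

static rtx
index_mem_by_reg_sketch (rtx mem, rtx index_reg)
{
  return offset_address (mem, index_reg, 4);
}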
2210/* Return a memory reference like MEMREF, but with its address changed to
2211 ADDR. The caller is asserting that the actual piece of memory pointed
2212 to is the same, just the form of the address is being changed, such as
2213 by putting something into a register. */
2214
2215rtx
502b8322 2216replace_equiv_address (rtx memref, rtx addr)
792760b9 2217{
2218 /* change_address_1 copies the memory attribute structure without change
2219 and that's exactly what we want here. */
40c0668b 2220 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2221 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2222}
738cc472 2223
2224/* Likewise, but the reference is not required to be valid. */
2225
2226rtx
502b8322 2227replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2228{
2229 return change_address_1 (memref, VOIDmode, addr, 0);
2230}
2231
2232/* Return a memory reference like MEMREF, but with its mode widened to
2233 MODE and offset by OFFSET. This would be used by targets that e.g.
2234 cannot issue QImode memory operations and have to use SImode memory
2235 operations plus masking logic. */
2236
2237rtx
502b8322 2238widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2239{
5f2cbd0d 2240 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2241 struct mem_attrs attrs;
2242 unsigned int size = GET_MODE_SIZE (mode);
2243
fdb1c7b3 2244 /* If there are no changes, just return the original memory reference. */
2245 if (new_rtx == memref)
2246 return new_rtx;
fdb1c7b3 2247
2248 attrs = *get_mem_attrs (new_rtx);
2249
2250 /* If we don't know what offset we were at within the expression, then
2251 we can't know if we've overstepped the bounds. */
754c3d5d 2252 if (! attrs.offset_known_p)
f12144dd 2253 attrs.expr = NULL_TREE;
e7dfe4bb 2254
f12144dd 2255 while (attrs.expr)
e7dfe4bb 2256 {
f12144dd 2257 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2258 {
2259 tree field = TREE_OPERAND (attrs.expr, 1);
2260 tree offset = component_ref_field_offset (attrs.expr);
2261
2262 if (! DECL_SIZE_UNIT (field))
2263 {
f12144dd 2264 attrs.expr = NULL_TREE;
2265 break;
2266 }
2267
2268 /* Is the field at least as large as the access? If so, ok,
2269 otherwise strip back to the containing structure. */
2270 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2271 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2272 && attrs.offset >= 0)
2273 break;
2274
44de5aeb 2275 if (! host_integerp (offset, 1))
e7dfe4bb 2276 {
f12144dd 2277 attrs.expr = NULL_TREE;
2278 break;
2279 }
2280
f12144dd 2281 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2282 attrs.offset += tree_low_cst (offset, 1);
2283 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2284 / BITS_PER_UNIT);
2285 }
2286 /* Similarly for the decl. */
2287 else if (DECL_P (attrs.expr)
2288 && DECL_SIZE_UNIT (attrs.expr)
2289 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2290 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2291 && (! attrs.offset_known_p || attrs.offset >= 0))
2292 break;
2293 else
2294 {
2295 /* The widened memory access overflows the expression, which means
2296 that it could alias another expression. Zap it. */
f12144dd 2297 attrs.expr = NULL_TREE;
2298 break;
2299 }
2300 }
2301
f12144dd 2302 if (! attrs.expr)
754c3d5d 2303 attrs.offset_known_p = false;
2304
2305 /* The widened memory may alias other stuff, so zap the alias set. */
2306 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2307 attrs.alias = 0;
2308 attrs.size_known_p = true;
2309 attrs.size = size;
f12144dd 2310 set_mem_attrs (new_rtx, &attrs);
60564289 2311 return new_rtx;
e7dfe4bb 2312}
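/* Hedged sketch (hypothetical helper): a target without byte loads
   might widen a QImode access to SImode at the same address and mask
   afterwards.  */

static rtx
widen_byte_access_sketch (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}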
23b2ce53 2313\f
2314/* A fake decl that is used as the MEM_EXPR of spill slots. */
2315static GTY(()) tree spill_slot_decl;
2316
2317tree
2318get_spill_slot_decl (bool force_build_p)
2319{
2320 tree d = spill_slot_decl;
2321 rtx rd;
f12144dd 2322 struct mem_attrs attrs;
f6129d66 2323
3d7e23f6 2324 if (d || !force_build_p)
2325 return d;
2326
2327 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2328 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2329 DECL_ARTIFICIAL (d) = 1;
2330 DECL_IGNORED_P (d) = 1;
2331 TREE_USED (d) = 1;
2332 spill_slot_decl = d;
2333
2334 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2335 MEM_NOTRAP_P (rd) = 1;
2336 attrs = *mode_mem_attrs[(int) BLKmode];
2337 attrs.alias = new_alias_set ();
2338 attrs.expr = d;
2339 set_mem_attrs (rd, &attrs);
2340 SET_DECL_RTL (d, rd);
2341
2342 return d;
2343}
2344
2345/* Given MEM, a result from assign_stack_local, fill in the memory
2346 attributes as appropriate for a register allocator spill slot.
2347 These slots are not aliasable by other memory. We arrange for
2348 them all to use a single MEM_EXPR, so that the aliasing code can
2349 work properly in the case of shared spill slots. */
2350
2351void
2352set_mem_attrs_for_spill (rtx mem)
2353{
2354 struct mem_attrs attrs;
2355 rtx addr;
f6129d66 2356
2357 attrs = *get_mem_attrs (mem);
2358 attrs.expr = get_spill_slot_decl (true);
2359 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2360 attrs.addrspace = ADDR_SPACE_GENERIC;
2361
2362 /* We expect the incoming memory to be of the form:
2363 (mem:MODE (plus (reg sfp) (const_int offset)))
2364 with perhaps the plus missing for offset = 0. */
2365 addr = XEXP (mem, 0);
2366 attrs.offset_known_p = true;
2367 attrs.offset = 0;
f6129d66 2368 if (GET_CODE (addr) == PLUS
481683e1 2369 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2370 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2371
f12144dd 2372 set_mem_attrs (mem, &attrs);
2373 MEM_NOTRAP_P (mem) = 1;
2374}
2375\f
2376/* Return a newly created CODE_LABEL rtx with a unique label number. */
2377
2378rtx
502b8322 2379gen_label_rtx (void)
23b2ce53 2380{
0dc36574 2381 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2382 NULL, label_num++, NULL);
2383}
2384\f
2385/* For procedure integration. */
2386
23b2ce53 2387/* Install new pointers to the first and last insns in the chain.
86fe05e0 2388 Also, set cur_insn_uid to one higher than the last in use.
2389 Used for an inline-procedure after copying the insn chain. */
2390
2391void
502b8322 2392set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2393{
2394 rtx insn;
2395
2396 set_first_insn (first);
2397 set_last_insn (last);
2398 cur_insn_uid = 0;
2399
2400 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2401 {
2402 int debug_count = 0;
2403
2404 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2405 cur_debug_insn_uid = 0;
2406
2407 for (insn = first; insn; insn = NEXT_INSN (insn))
2408 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2409 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2410 else
2411 {
2412 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2413 if (DEBUG_INSN_P (insn))
2414 debug_count++;
2415 }
2416
2417 if (debug_count)
2418 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2419 else
2420 cur_debug_insn_uid++;
2421 }
2422 else
2423 for (insn = first; insn; insn = NEXT_INSN (insn))
2424 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2425
2426 cur_insn_uid++;
23b2ce53 2427}
23b2ce53 2428\f
750c9258 2429/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2430 structure. This routine should only be called once. */
23b2ce53 2431
fd743bc1 2432static void
b4aaa77b 2433unshare_all_rtl_1 (rtx insn)
23b2ce53 2434{
d1b81779 2435 /* Unshare just about everything else. */
2c07f13b 2436 unshare_all_rtl_in_chain (insn);
750c9258 2437
2438 /* Make sure the addresses of stack slots found outside the insn chain
2439 (such as, in DECL_RTL of a variable) are not shared
2440 with the insn chain.
2441
2442 This special care is necessary when the stack slot MEM does not
2443 actually appear in the insn chain. If it does appear, its address
2444 is unshared from all else at that point. */
242b0ce6 2445 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2446}
2447
750c9258 2448/* Go through all the RTL insn bodies and copy any invalid shared
2449 structure, again. This is a fairly expensive thing to do so it
2450 should be done sparingly. */
2451
2452void
502b8322 2453unshare_all_rtl_again (rtx insn)
2454{
2455 rtx p;
2456 tree decl;
2457
d1b81779 2458 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2459 if (INSN_P (p))
2460 {
2461 reset_used_flags (PATTERN (p));
2462 reset_used_flags (REG_NOTES (p));
2463 if (CALL_P (p))
2464 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2465 }
624c87aa 2466
2d4aecb3 2467 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2468 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2469
624c87aa 2470 /* Make sure that virtual parameters are not shared. */
910ad8de 2471 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2472 set_used_flags (DECL_RTL (decl));
2473
2474 reset_used_flags (stack_slot_list);
2475
b4aaa77b 2476 unshare_all_rtl_1 (insn);
2477}
2478
c2924966 2479unsigned int
2480unshare_all_rtl (void)
2481{
b4aaa77b 2482 unshare_all_rtl_1 (get_insns ());
c2924966 2483 return 0;
2484}
2485
ef330312 2486
2487/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2488 Recursively does the same for subexpressions. */
2489
2490static void
2491verify_rtx_sharing (rtx orig, rtx insn)
2492{
2493 rtx x = orig;
2494 int i;
2495 enum rtx_code code;
2496 const char *format_ptr;
2497
2498 if (x == 0)
2499 return;
2500
2501 code = GET_CODE (x);
2502
2503 /* These types may be freely shared. */
2504
2505 switch (code)
2506 {
2507 case REG:
2508 case DEBUG_EXPR:
2509 case VALUE:
d8116890 2510 CASE_CONST_ANY:
2511 case SYMBOL_REF:
2512 case LABEL_REF:
2513 case CODE_LABEL:
2514 case PC:
2515 case CC0:
3810076b 2516 case RETURN:
26898771 2517 case SIMPLE_RETURN:
2c07f13b 2518 case SCRATCH:
3e89ed8d 2519 /* SCRATCHes must be shared because they represent distinct values. */
c5c5ba89 2520 return;
3e89ed8d 2521 case CLOBBER:
2522 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2523 clobbers or clobbers of hard registers that originated as pseudos.
2524 This is needed to allow safe register renaming. */
2525 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2526 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2527 return;
2528 break;
2529
2530 case CONST:
6fb5fa3c 2531 if (shared_const_p (orig))
2532 return;
2533 break;
2534
2535 case MEM:
2536 /* A MEM is allowed to be shared if its address is constant. */
2537 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2538 || reload_completed || reload_in_progress)
2539 return;
2540
2541 break;
2542
2543 default:
2544 break;
2545 }
2546
2547 /* This rtx may not be shared. If it has already been seen,
2548 replace it with a copy of itself. */
1a2caa7a 2549#ifdef ENABLE_CHECKING
2550 if (RTX_FLAG (x, used))
2551 {
ab532386 2552 error ("invalid rtl sharing found in the insn");
2c07f13b 2553 debug_rtx (insn);
ab532386 2554 error ("shared rtx");
2c07f13b 2555 debug_rtx (x);
ab532386 2556 internal_error ("internal consistency failure");
2c07f13b 2557 }
1a2caa7a
NS
2558#endif
2559 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2560
2c07f13b
JH
2561 RTX_FLAG (x, used) = 1;
2562
6614fd40 2563 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2564
2565 format_ptr = GET_RTX_FORMAT (code);
2566
2567 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2568 {
2569 switch (*format_ptr++)
2570 {
2571 case 'e':
2572 verify_rtx_sharing (XEXP (x, i), insn);
2573 break;
2574
2575 case 'E':
2576 if (XVEC (x, i) != NULL)
2577 {
2578 int j;
2579 int len = XVECLEN (x, i);
2580
2581 for (j = 0; j < len; j++)
2582 {
1a2caa7a
NS
2583 /* We allow sharing of ASM_OPERANDS inside single
2584 instruction. */
2c07f13b 2585 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2586 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2587 == ASM_OPERANDS))
2c07f13b
JH
2588 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2589 else
2590 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2591 }
2592 }
2593 break;
2594 }
2595 }
2596 return;
2597}
2598
ba228239 2599/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2600 sharing in between the subexpressions. */
2601
24e47c76 2602DEBUG_FUNCTION void
2603verify_rtl_sharing (void)
2604{
2605 rtx p;
2606
2607 timevar_push (TV_VERIFY_RTL_SHARING);
2608
2609 for (p = get_insns (); p; p = NEXT_INSN (p))
2610 if (INSN_P (p))
2611 {
2612 reset_used_flags (PATTERN (p));
2613 reset_used_flags (REG_NOTES (p));
2614 if (CALL_P (p))
2615 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2616 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2617 {
2618 int i;
2619 rtx q, sequence = PATTERN (p);
2620
2621 for (i = 0; i < XVECLEN (sequence, 0); i++)
2622 {
2623 q = XVECEXP (sequence, 0, i);
2624 gcc_assert (INSN_P (q));
2625 reset_used_flags (PATTERN (q));
2626 reset_used_flags (REG_NOTES (q));
2627 if (CALL_P (q))
2628 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2629 }
2630 }
2631 }
2632
2633 for (p = get_insns (); p; p = NEXT_INSN (p))
2634 if (INSN_P (p))
2635 {
2636 verify_rtx_sharing (PATTERN (p), p);
2637 verify_rtx_sharing (REG_NOTES (p), p);
2638 if (CALL_P (p))
2639 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2c07f13b 2640 }
2641
2642 timevar_pop (TV_VERIFY_RTL_SHARING);
2643}
2644
2645/* Go through all the RTL insn bodies and copy any invalid shared structure.
2646 Assumes the mark bits are cleared at entry. */
2647
2648void
2649unshare_all_rtl_in_chain (rtx insn)
2650{
2651 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2652 if (INSN_P (insn))
2653 {
2654 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2655 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2656 if (CALL_P (insn))
2657 CALL_INSN_FUNCTION_USAGE (insn)
2658 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2659 }
2660}
2661
2d4aecb3 2662/* Go through all virtual stack slots of a function and mark them as
2663 shared. We never replace the DECL_RTLs themselves with a copy,
2664 but expressions mentioned in a DECL_RTL cannot be shared with
2665 expressions in the instruction stream.
2666
2667 Note that reload may convert pseudo registers into memories in-place.
2668 Pseudo registers are always shared, but MEMs never are. Thus if we
2669 reset the used flags on MEMs in the instruction stream, we must set
2670 them again on MEMs that appear in DECL_RTLs. */
2671
2d4aecb3 2672static void
5eb2a9f2 2673set_used_decls (tree blk)
2674{
2675 tree t;
2676
2677 /* Mark decls. */
910ad8de 2678 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2679 if (DECL_RTL_SET_P (t))
5eb2a9f2 2680 set_used_flags (DECL_RTL (t));
2681
2682 /* Now process sub-blocks. */
87caf699 2683 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2684 set_used_decls (t);
2685}
2686
23b2ce53 2687/* Mark ORIG as in use, and return a copy of it if it was already in use.
2688 Recursively does the same for subexpressions. Uses
2689 copy_rtx_if_shared_1 to reduce stack space. */
2690
2691rtx
502b8322 2692copy_rtx_if_shared (rtx orig)
23b2ce53 2693{
2694 copy_rtx_if_shared_1 (&orig);
2695 return orig;
2696}
2697
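/* Per-insn unsharing sketch, mirroring what unshare_all_rtl_in_chain
   does (hypothetical helper, not part of the original file): clear the
   used bits first, then copy anything still marked as in use.  */

static void
unshare_insn_pattern_sketch (rtx insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}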
2698/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2699 use. Recursively does the same for subexpressions. */
2700
2701static void
2702copy_rtx_if_shared_1 (rtx *orig1)
2703{
2704 rtx x;
2705 int i;
2706 enum rtx_code code;
32b32b16 2707 rtx *last_ptr;
b3694847 2708 const char *format_ptr;
23b2ce53 2709 int copied = 0;
2710 int length;
2711
2712 /* Repeat is used to turn tail-recursion into iteration. */
2713repeat:
2714 x = *orig1;
2715
2716 if (x == 0)
32b32b16 2717 return;
2718
2719 code = GET_CODE (x);
2720
2721 /* These types may be freely shared. */
2722
2723 switch (code)
2724 {
2725 case REG:
2726 case DEBUG_EXPR:
2727 case VALUE:
d8116890 2728 CASE_CONST_ANY:
23b2ce53 2729 case SYMBOL_REF:
2c07f13b 2730 case LABEL_REF:
2731 case CODE_LABEL:
2732 case PC:
2733 case CC0:
276e0224 2734 case RETURN:
26898771 2735 case SIMPLE_RETURN:
23b2ce53 2736 case SCRATCH:
0f41302f 2737 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 2738 return;
3e89ed8d 2739 case CLOBBER:
2740 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2741 clobbers or clobbers of hard registers that originated as pseudos.
2742 This is needed to allow safe register renaming. */
2743 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2744 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2745 return;
2746 break;
23b2ce53 2747
b851ea09 2748 case CONST:
6fb5fa3c 2749 if (shared_const_p (x))
32b32b16 2750 return;
b851ea09
RK
2751 break;
2752
b5b8b0ac 2753 case DEBUG_INSN:
23b2ce53
RS
2754 case INSN:
2755 case JUMP_INSN:
2756 case CALL_INSN:
2757 case NOTE:
2758 case BARRIER:
2759 /* The chain of insns is not being copied. */
32b32b16 2760 return;
23b2ce53 2761
2762 default:
2763 break;
2764 }
2765
2766 /* This rtx may not be shared. If it has already been seen,
2767 replace it with a copy of itself. */
2768
2adc7f12 2769 if (RTX_FLAG (x, used))
23b2ce53 2770 {
aacd3885 2771 x = shallow_copy_rtx (x);
2772 copied = 1;
2773 }
2adc7f12 2774 RTX_FLAG (x, used) = 1;
2775
2776 /* Now scan the subexpressions recursively.
2777 We can store any replaced subexpressions directly into X
2778 since we know X is not shared! Any vectors in X
2779 must be copied if X was copied. */
2780
2781 format_ptr = GET_RTX_FORMAT (code);
2782 length = GET_RTX_LENGTH (code);
2783 last_ptr = NULL;
b8698a0f 2784
32b32b16 2785 for (i = 0; i < length; i++)
2786 {
2787 switch (*format_ptr++)
2788 {
2789 case 'e':
2790 if (last_ptr)
2791 copy_rtx_if_shared_1 (last_ptr);
2792 last_ptr = &XEXP (x, i);
2793 break;
2794
2795 case 'E':
2796 if (XVEC (x, i) != NULL)
2797 {
b3694847 2798 int j;
f0722107 2799 int len = XVECLEN (x, i);
b8698a0f 2800
2801 /* Copy the vector iff I copied the rtx and the length
2802 is nonzero. */
f0722107 2803 if (copied && len > 0)
8f985ec4 2804 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2805
5d3cc252 2806 /* Call recursively on all inside the vector. */
f0722107 2807 for (j = 0; j < len; j++)
2808 {
2809 if (last_ptr)
2810 copy_rtx_if_shared_1 (last_ptr);
2811 last_ptr = &XVECEXP (x, i, j);
2812 }
2813 }
2814 break;
2815 }
2816 }
2817 *orig1 = x;
2818 if (last_ptr)
2819 {
2820 orig1 = last_ptr;
2821 goto repeat;
2822 }
2823 return;
2824}
2825
76369a82 2826/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 2827
2828static void
2829mark_used_flags (rtx x, int flag)
23b2ce53 2830{
2831 int i, j;
2832 enum rtx_code code;
2833 const char *format_ptr;
32b32b16 2834 int length;
23b2ce53 2835
2836 /* Repeat is used to turn tail-recursion into iteration. */
2837repeat:
2838 if (x == 0)
2839 return;
2840
2841 code = GET_CODE (x);
2842
9faa82d8 2843 /* These types may be freely shared so we needn't do any resetting
2844 for them. */
2845
2846 switch (code)
2847 {
2848 case REG:
2849 case DEBUG_EXPR:
2850 case VALUE:
d8116890 2851 CASE_CONST_ANY:
2852 case SYMBOL_REF:
2853 case CODE_LABEL:
2854 case PC:
2855 case CC0:
276e0224 2856 case RETURN:
26898771 2857 case SIMPLE_RETURN:
2858 return;
2859
b5b8b0ac 2860 case DEBUG_INSN:
2861 case INSN:
2862 case JUMP_INSN:
2863 case CALL_INSN:
2864 case NOTE:
2865 case LABEL_REF:
2866 case BARRIER:
2867 /* The chain of insns is not being copied. */
2868 return;
750c9258 2869
2870 default:
2871 break;
2872 }
2873
76369a82 2874 RTX_FLAG (x, used) = flag;
2875
2876 format_ptr = GET_RTX_FORMAT (code);
32b32b16 2877 length = GET_RTX_LENGTH (code);
b8698a0f 2878
32b32b16 2879 for (i = 0; i < length; i++)
2880 {
2881 switch (*format_ptr++)
2882 {
2883 case 'e':
2884 if (i == length-1)
2885 {
2886 x = XEXP (x, i);
2887 goto repeat;
2888 }
76369a82 2889 mark_used_flags (XEXP (x, i), flag);
2890 break;
2891
2892 case 'E':
2893 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 2894 mark_used_flags (XVECEXP (x, i, j), flag);
2895 break;
2896 }
2897 }
2898}
2c07f13b 2899
76369a82 2900/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2901 to look for shared sub-parts. */
2902
2903void
76369a82 2904reset_used_flags (rtx x)
2c07f13b 2905{
2906 mark_used_flags (x, 0);
2907}
2c07f13b 2908
2909/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2910 to look for shared sub-parts. */
2c07f13b 2911
2912void
2913set_used_flags (rtx x)
2914{
2915 mark_used_flags (x, 1);
2c07f13b 2916}
2917\f
2918/* Copy X if necessary so that it won't be altered by changes in OTHER.
2919 Return X or the rtx for the pseudo reg the value of X was copied into.
2920 OTHER must be valid as a SET_DEST. */
2921
2922rtx
502b8322 2923make_safe_from (rtx x, rtx other)
2924{
2925 while (1)
2926 switch (GET_CODE (other))
2927 {
2928 case SUBREG:
2929 other = SUBREG_REG (other);
2930 break;
2931 case STRICT_LOW_PART:
2932 case SIGN_EXTEND:
2933 case ZERO_EXTEND:
2934 other = XEXP (other, 0);
2935 break;
2936 default:
2937 goto done;
2938 }
2939 done:
3c0cb5de 2940 if ((MEM_P (other)
23b2ce53 2941 && ! CONSTANT_P (x)
f8cfc6aa 2942 && !REG_P (x)
23b2ce53 2943 && GET_CODE (x) != SUBREG)
f8cfc6aa 2944 || (REG_P (other)
2945 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2946 || reg_mentioned_p (other, x))))
2947 {
2948 rtx temp = gen_reg_rtx (GET_MODE (x));
2949 emit_move_insn (temp, x);
2950 return temp;
2951 }
2952 return x;
2953}
2954\f
2955/* Emission of insns (adding them to the doubly-linked list). */
2956
2957/* Return the last insn emitted, even if it is in a sequence now pushed. */
2958
2959rtx
502b8322 2960get_last_insn_anywhere (void)
2961{
2962 struct sequence_stack *stack;
2963 if (get_last_insn ())
2964 return get_last_insn ();
49ad7cfa 2965 for (stack = seq_stack; stack; stack = stack->next)
2966 if (stack->last != 0)
2967 return stack->last;
2968 return 0;
2969}
2970
2971/* Return the first nonnote insn emitted in current sequence or current
2972 function. This routine looks inside SEQUENCEs. */
2973
2974rtx
502b8322 2975get_first_nonnote_insn (void)
2a496e8b 2976{
5936d944 2977 rtx insn = get_insns ();
2978
2979 if (insn)
2980 {
2981 if (NOTE_P (insn))
2982 for (insn = next_insn (insn);
2983 insn && NOTE_P (insn);
2984 insn = next_insn (insn))
2985 continue;
2986 else
2987 {
2ca202e7 2988 if (NONJUMP_INSN_P (insn)
2989 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2990 insn = XVECEXP (PATTERN (insn), 0, 0);
2991 }
2992 }
2993
2994 return insn;
2995}
2996
2997/* Return the last nonnote insn emitted in current sequence or current
2998 function. This routine looks inside SEQUENCEs. */
2999
3000rtx
502b8322 3001get_last_nonnote_insn (void)
2a496e8b 3002{
5936d944 3003 rtx insn = get_last_insn ();
3004
3005 if (insn)
3006 {
3007 if (NOTE_P (insn))
3008 for (insn = previous_insn (insn);
3009 insn && NOTE_P (insn);
3010 insn = previous_insn (insn))
3011 continue;
3012 else
3013 {
2ca202e7 3014 if (NONJUMP_INSN_P (insn)
3015 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3016 insn = XVECEXP (PATTERN (insn), 0,
3017 XVECLEN (PATTERN (insn), 0) - 1);
3018 }
3019 }
3020
3021 return insn;
3022}
3023
3024/* Return the number of actual (non-debug) insns emitted in this
3025 function. */
3026
3027int
3028get_max_insn_count (void)
3029{
3030 int n = cur_insn_uid;
3031
3032 /* The table size must be stable across -g, to avoid codegen
3033 differences due to debug insns, and not be affected by
3034 -fmin-insn-uid, to avoid excessive table size and to simplify
3035 debugging of -fcompare-debug failures. */
3036 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3037 n -= cur_debug_insn_uid;
3038 else
3039 n -= MIN_NONDEBUG_INSN_UID;
3040
3041 return n;
3042}
3043
3044\f
3045/* Return the next insn. If it is a SEQUENCE, return the first insn
3046 of the sequence. */
3047
3048rtx
502b8322 3049next_insn (rtx insn)
23b2ce53 3050{
75547801
KG
3051 if (insn)
3052 {
3053 insn = NEXT_INSN (insn);
3054 if (insn && NONJUMP_INSN_P (insn)
3055 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3056 insn = XVECEXP (PATTERN (insn), 0, 0);
3057 }
23b2ce53 3058
75547801 3059 return insn;
3060}
3061
3062/* Return the previous insn. If it is a SEQUENCE, return the last insn
3063 of the sequence. */
3064
3065rtx
502b8322 3066previous_insn (rtx insn)
23b2ce53 3067{
75547801
KG
3068 if (insn)
3069 {
3070 insn = PREV_INSN (insn);
3071 if (insn && NONJUMP_INSN_P (insn)
3072 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3073 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3074 }
23b2ce53 3075
75547801 3076 return insn;
3077}
3078
3079/* Return the next insn after INSN that is not a NOTE. This routine does not
3080 look inside SEQUENCEs. */
3081
3082rtx
502b8322 3083next_nonnote_insn (rtx insn)
23b2ce53 3084{
75547801
KG
3085 while (insn)
3086 {
3087 insn = NEXT_INSN (insn);
3088 if (insn == 0 || !NOTE_P (insn))
3089 break;
3090 }
23b2ce53 3091
75547801 3092 return insn;
3093}
3094
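/* Walking sketch (hypothetical helper, not part of the original file):
   count the non-note insns that follow INSN, using the iterator above.  */

static int
count_following_nonnotes_sketch (rtx insn)
{
  int n = 0;
  for (insn = next_nonnote_insn (insn); insn;
       insn = next_nonnote_insn (insn))
    n++;
  return n;
}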
3095/* Return the next insn after INSN that is not a NOTE, but stop the
3096 search before we enter another basic block. This routine does not
3097 look inside SEQUENCEs. */
3098
3099rtx
3100next_nonnote_insn_bb (rtx insn)
3101{
3102 while (insn)
3103 {
3104 insn = NEXT_INSN (insn);
3105 if (insn == 0 || !NOTE_P (insn))
3106 break;
3107 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3108 return NULL_RTX;
3109 }
3110
3111 return insn;
3112}
3113
3114/* Return the previous insn before INSN that is not a NOTE. This routine does
3115 not look inside SEQUENCEs. */
3116
3117rtx
502b8322 3118prev_nonnote_insn (rtx insn)
23b2ce53 3119{
3120 while (insn)
3121 {
3122 insn = PREV_INSN (insn);
3123 if (insn == 0 || !NOTE_P (insn))
3124 break;
3125 }
23b2ce53 3126
75547801 3127 return insn;
3128}
3129
3130/* Return the previous insn before INSN that is not a NOTE, but stop
3131 the search before we enter another basic block. This routine does
3132 not look inside SEQUENCEs. */
3133
3134rtx
3135prev_nonnote_insn_bb (rtx insn)
3136{
3137 while (insn)
3138 {
3139 insn = PREV_INSN (insn);
3140 if (insn == 0 || !NOTE_P (insn))
3141 break;
3142 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3143 return NULL_RTX;
3144 }
3145
3146 return insn;
3147}
3148
3149/* Return the next insn after INSN that is not a DEBUG_INSN. This
3150 routine does not look inside SEQUENCEs. */
3151
3152rtx
3153next_nondebug_insn (rtx insn)
3154{
3155 while (insn)
3156 {
3157 insn = NEXT_INSN (insn);
3158 if (insn == 0 || !DEBUG_INSN_P (insn))
3159 break;
3160 }
3161
3162 return insn;
3163}
3164
3165/* Return the previous insn before INSN that is not a DEBUG_INSN.
3166 This routine does not look inside SEQUENCEs. */
3167
3168rtx
3169prev_nondebug_insn (rtx insn)
3170{
3171 while (insn)
3172 {
3173 insn = PREV_INSN (insn);
3174 if (insn == 0 || !DEBUG_INSN_P (insn))
3175 break;
3176 }
3177
3178 return insn;
3179}
3180
3181/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3182 This routine does not look inside SEQUENCEs. */
3183
3184rtx
3185next_nonnote_nondebug_insn (rtx insn)
3186{
3187 while (insn)
3188 {
3189 insn = NEXT_INSN (insn);
3190 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3191 break;
3192 }
3193
3194 return insn;
3195}
3196
3197/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3198 This routine does not look inside SEQUENCEs. */
3199
3200rtx
3201prev_nonnote_nondebug_insn (rtx insn)
3202{
3203 while (insn)
3204 {
3205 insn = PREV_INSN (insn);
3206 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3207 break;
3208 }
3209
3210 return insn;
3211}
3212
3213/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3214 or 0, if there is none. This routine does not look inside
0f41302f 3215 SEQUENCEs. */
3216
3217rtx
502b8322 3218next_real_insn (rtx insn)
23b2ce53 3219{
3220 while (insn)
3221 {
3222 insn = NEXT_INSN (insn);
3223 if (insn == 0 || INSN_P (insn))
3224 break;
3225 }
23b2ce53 3226
75547801 3227 return insn;
3228}
3229
3230/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3231 or 0, if there is none. This routine does not look inside
3232 SEQUENCEs. */
3233
3234rtx
502b8322 3235prev_real_insn (rtx insn)
23b2ce53 3236{
75547801
KG
3237 while (insn)
3238 {
3239 insn = PREV_INSN (insn);
3240 if (insn == 0 || INSN_P (insn))
3241 break;
3242 }
23b2ce53 3243
75547801 3244 return insn;
3245}
3246
3247/* Return the last CALL_INSN in the current list, or 0 if there is none.
3248 This routine does not look inside SEQUENCEs. */
3249
3250rtx
502b8322 3251last_call_insn (void)
3252{
3253 rtx insn;
3254
3255 for (insn = get_last_insn ();
4b4bf941 3256 insn && !CALL_P (insn);
3257 insn = PREV_INSN (insn))
3258 ;
3259
3260 return insn;
3261}
3262
23b2ce53 3263/* Find the next insn after INSN that really does something. This routine
3264 does not look inside SEQUENCEs. After reload this also skips over
3265 standalone USE and CLOBBER insns. */
23b2ce53 3266
69732dcb 3267int
4f588890 3268active_insn_p (const_rtx insn)
69732dcb 3269{
4b4bf941 3270 return (CALL_P (insn) || JUMP_P (insn)
39718607 3271 || JUMP_TABLE_DATA_P (insn) /* FIXME */
4b4bf941 3272 || (NONJUMP_INSN_P (insn)
3273 && (! reload_completed
3274 || (GET_CODE (PATTERN (insn)) != USE
3275 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3276}
3277
23b2ce53 3278rtx
502b8322 3279next_active_insn (rtx insn)
23b2ce53 3280{
3281 while (insn)
3282 {
3283 insn = NEXT_INSN (insn);
3284 if (insn == 0 || active_insn_p (insn))
3285 break;
3286 }
23b2ce53 3287
75547801 3288 return insn;
3289}
3290
3291/* Find the last insn before INSN that really does something. This routine
3292 does not look inside SEQUENCEs. After reload this also skips over
3293 standalone USE and CLOBBER insns. */
3294
3295rtx
502b8322 3296prev_active_insn (rtx insn)
23b2ce53 3297{
3298 while (insn)
3299 {
3300 insn = PREV_INSN (insn);
3301 if (insn == 0 || active_insn_p (insn))
3302 break;
3303 }
23b2ce53 3304
75547801 3305 return insn;
3306}
3307
3308/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3309
3310rtx
502b8322 3311next_label (rtx insn)
23b2ce53 3312{
3313 while (insn)
3314 {
3315 insn = NEXT_INSN (insn);
3316 if (insn == 0 || LABEL_P (insn))
3317 break;
3318 }
23b2ce53 3319
75547801 3320 return insn;
3321}
3322
3323/* Return the last label to mark the same position as LABEL. Return LABEL
3324 itself if it is null or any return rtx. */
6c2511d3
RS
3325
3326rtx
3327skip_consecutive_labels (rtx label)
3328{
3329 rtx insn;
3330
3331 if (label && ANY_RETURN_P (label))
3332 return label;
3333
3334 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3335 if (LABEL_P (insn))
3336 label = insn;
3337
3338 return label;
3339}
3340\f
3341#ifdef HAVE_cc0
3342/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3343 and REG_CC_USER notes so we can find it. */
3344
3345void
502b8322 3346link_cc0_insns (rtx insn)
3347{
3348 rtx user = next_nonnote_insn (insn);
3349
4b4bf941 3350 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3351 user = XVECEXP (PATTERN (user), 0, 0);
3352
3353 add_reg_note (user, REG_CC_SETTER, insn);
3354 add_reg_note (insn, REG_CC_USER, user);
3355}
3356
3357/* Return the next insn that uses CC0 after INSN, which is assumed to
3358 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3359 applied to the result of this function should yield INSN).
3360
3361 Normally, this is simply the next insn. However, if a REG_CC_USER note
3362 is present, it contains the insn that uses CC0.
3363
3364 Return 0 if we can't find the insn. */
3365
3366rtx
502b8322 3367next_cc0_user (rtx insn)
23b2ce53 3368{
906c4e36 3369 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3370
3371 if (note)
3372 return XEXP (note, 0);
3373
3374 insn = next_nonnote_insn (insn);
4b4bf941 3375 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3376 insn = XVECEXP (PATTERN (insn), 0, 0);
3377
2c3c49de 3378 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3379 return insn;
3380
3381 return 0;
3382}
3383
3384/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3385 note, it is the previous insn. */
3386
3387rtx
502b8322 3388prev_cc0_setter (rtx insn)
23b2ce53 3389{
906c4e36 3390 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3391
3392 if (note)
3393 return XEXP (note, 0);
3394
3395 insn = prev_nonnote_insn (insn);
5b0264cb 3396 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3397
3398 return insn;
3399}
3400#endif
e5bef2e4 3401
594f8779
RZ
3402#ifdef AUTO_INC_DEC
3403/* Find a RTX_AUTOINC class rtx which matches DATA. */
3404
3405static int
3406find_auto_inc (rtx *xp, void *data)
3407{
3408 rtx x = *xp;
5ead67f6 3409 rtx reg = (rtx) data;
594f8779
RZ
3410
3411 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3412 return 0;
3413
3414 switch (GET_CODE (x))
3415 {
3416 case PRE_DEC:
3417 case PRE_INC:
3418 case POST_DEC:
3419 case POST_INC:
3420 case PRE_MODIFY:
3421 case POST_MODIFY:
3422 if (rtx_equal_p (reg, XEXP (x, 0)))
3423 return 1;
3424 break;
3425
3426 default:
3427 gcc_unreachable ();
3428 }
3429 return -1;
3430}
3431#endif
3432
e5bef2e4
HB
3433/* Increment the label uses for all labels present in rtx. */
3434
3435static void
502b8322 3436mark_label_nuses (rtx x)
e5bef2e4 3437{
b3694847
SS
3438 enum rtx_code code;
3439 int i, j;
3440 const char *fmt;
e5bef2e4
HB
3441
3442 code = GET_CODE (x);
7537fc90 3443 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3444 LABEL_NUSES (XEXP (x, 0))++;
3445
3446 fmt = GET_RTX_FORMAT (code);
3447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3448 {
3449 if (fmt[i] == 'e')
0fb7aeda 3450 mark_label_nuses (XEXP (x, i));
e5bef2e4 3451 else if (fmt[i] == 'E')
0fb7aeda 3452 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3453 mark_label_nuses (XVECEXP (x, i, j));
3454 }
3455}
3456
23b2ce53
RS
3457\f
3458/* Try splitting insns that can be split for better scheduling.
3459 PAT is the pattern which might split.
3460 TRIAL is the insn providing PAT.
cc2902df 3461 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3462
3463 If this routine succeeds in splitting, it returns the first or last
11147ebe 3464 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3465 returns TRIAL. If the insn to be returned can be split, it will be. */
3466
3467rtx
502b8322 3468try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3469{
3470 rtx before = PREV_INSN (trial);
3471 rtx after = NEXT_INSN (trial);
23b2ce53 3472 int has_barrier = 0;
4a8cae83 3473 rtx note, seq, tem;
6b24c259 3474 int probability;
599aedd9
RH
3475 rtx insn_last, insn;
3476 int njumps = 0;
6b24c259 3477
cd9c1ca8
RH
3478 /* We're not good at redistributing frame information. */
3479 if (RTX_FRAME_RELATED_P (trial))
3480 return trial;
3481
6b24c259
JH
3482 if (any_condjump_p (trial)
3483 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3484 split_branch_probability = INTVAL (XEXP (note, 0));
3485 probability = split_branch_probability;
3486
3487 seq = split_insns (pat, trial);
3488
3489 split_branch_probability = -1;
23b2ce53
RS
3490
3491 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3492 We may need to handle this specially. */
4b4bf941 3493 if (after && BARRIER_P (after))
23b2ce53
RS
3494 {
3495 has_barrier = 1;
3496 after = NEXT_INSN (after);
3497 }
3498
599aedd9
RH
3499 if (!seq)
3500 return trial;
3501
3502 /* Avoid infinite loop if any insn of the result matches
3503 the original pattern. */
3504 insn_last = seq;
3505 while (1)
23b2ce53 3506 {
599aedd9
RH
3507 if (INSN_P (insn_last)
3508 && rtx_equal_p (PATTERN (insn_last), pat))
3509 return trial;
3510 if (!NEXT_INSN (insn_last))
3511 break;
3512 insn_last = NEXT_INSN (insn_last);
3513 }
750c9258 3514
6fb5fa3c
DB
3515 /* We will be adding the new sequence to the function. The splitters
3516 may have introduced invalid RTL sharing, so unshare the sequence now. */
3517 unshare_all_rtl_in_chain (seq);
3518
599aedd9
RH
3519 /* Mark labels. */
3520 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3521 {
4b4bf941 3522 if (JUMP_P (insn))
599aedd9
RH
3523 {
3524 mark_jump_label (PATTERN (insn), insn, 0);
3525 njumps++;
3526 if (probability != -1
3527 && any_condjump_p (insn)
3528 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3529 {
599aedd9
RH
3530 /* We can preserve the REG_BR_PROB notes only if exactly
3531 one jump is created, otherwise the machine description
3532 is responsible for this step using
3533 split_branch_probability variable. */
5b0264cb 3534 gcc_assert (njumps == 1);
65c5f2a6 3535 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
2f937369 3536 }
599aedd9
RH
3537 }
3538 }
3539
3540 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3541 in SEQ and copy any additional information across. */
4b4bf941 3542 if (CALL_P (trial))
599aedd9
RH
3543 {
3544 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3545 if (CALL_P (insn))
599aedd9 3546 {
65712d5c
RS
3547 rtx next, *p;
3548
3549 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3550 target may have explicitly specified. */
3551 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3552 while (*p)
3553 p = &XEXP (*p, 1);
3554 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3555
3556 /* If the old call was a sibling call, the new one must
3557 be too. */
599aedd9 3558 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3559
3560 /* If the new call is the last instruction in the sequence,
3561 it will effectively replace the old call in-situ. Otherwise
3562 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3563 so that it comes immediately after the new call. */
3564 if (NEXT_INSN (insn))
65f3dedb
RS
3565 for (next = NEXT_INSN (trial);
3566 next && NOTE_P (next);
3567 next = NEXT_INSN (next))
3568 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3569 {
3570 remove_insn (next);
3571 add_insn_after (next, insn, NULL);
65f3dedb 3572 break;
65712d5c 3573 }
599aedd9
RH
3574 }
3575 }
4b5e8abe 3576
599aedd9
RH
3577 /* Copy notes, particularly those related to the CFG. */
3578 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3579 {
3580 switch (REG_NOTE_KIND (note))
3581 {
3582 case REG_EH_REGION:
1d65f45c 3583 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3584 break;
216183ce 3585
599aedd9
RH
3586 case REG_NORETURN:
3587 case REG_SETJMP:
0a35513e 3588 case REG_TM:
594f8779 3589 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3590 {
4b4bf941 3591 if (CALL_P (insn))
65c5f2a6 3592 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3593 }
599aedd9 3594 break;
d6e95df8 3595
599aedd9 3596 case REG_NON_LOCAL_GOTO:
594f8779 3597 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3598 {
4b4bf941 3599 if (JUMP_P (insn))
65c5f2a6 3600 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3601 }
599aedd9 3602 break;
e5bef2e4 3603
594f8779
RZ
3604#ifdef AUTO_INC_DEC
3605 case REG_INC:
3606 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3607 {
3608 rtx reg = XEXP (note, 0);
3609 if (!FIND_REG_INC_NOTE (insn, reg)
3610 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
65c5f2a6 3611 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3612 }
3613 break;
3614#endif
3615
9a08d230
RH
3616 case REG_ARGS_SIZE:
3617 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3618 break;
3619
599aedd9
RH
3620 default:
3621 break;
23b2ce53 3622 }
599aedd9
RH
3623 }
3624
3625 /* If there are LABELS inside the split insns increment the
3626 usage count so we don't delete the label. */
cf7c4aa6 3627 if (INSN_P (trial))
599aedd9
RH
3628 {
3629 insn = insn_last;
3630 while (insn != NULL_RTX)
23b2ce53 3631 {
cf7c4aa6 3632 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3633 if (NONJUMP_INSN_P (insn))
599aedd9 3634 mark_label_nuses (PATTERN (insn));
23b2ce53 3635
599aedd9
RH
3636 insn = PREV_INSN (insn);
3637 }
23b2ce53
RS
3638 }
3639
5368224f 3640 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
599aedd9
RH
3641
3642 delete_insn (trial);
3643 if (has_barrier)
3644 emit_barrier_after (tem);
3645
3646 /* Recursively call try_split for each new insn created; by the
3647 time control returns here that insn will be fully split, so
3648 set LAST and continue from the insn after the one returned.
3649 We can't use next_active_insn here since AFTER may be a note.
3650 Ignore deleted insns, which can be occur if not optimizing. */
3651 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3652 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3653 tem = try_split (PATTERN (tem), tem, 1);
3654
3655 /* Return either the first or the last insn, depending on which was
3656 requested. */
3657 return last
5936d944 3658 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3659 : NEXT_INSN (before);
23b2ce53
RS
3660}
3661\f
3662/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3663 Store PATTERN in the pattern slots. */
23b2ce53
RS
3664
3665rtx
502b8322 3666make_insn_raw (rtx pattern)
23b2ce53 3667{
b3694847 3668 rtx insn;
23b2ce53 3669
1f8f4a0b 3670 insn = rtx_alloc (INSN);
23b2ce53 3671
43127294 3672 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3673 PATTERN (insn) = pattern;
3674 INSN_CODE (insn) = -1;
1632afca 3675 REG_NOTES (insn) = NULL;
5368224f 3676 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3677 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3678
47984720
NC
3679#ifdef ENABLE_RTL_CHECKING
3680 if (insn
2c3c49de 3681 && INSN_P (insn)
47984720
NC
3682 && (returnjump_p (insn)
3683 || (GET_CODE (insn) == SET
3684 && SET_DEST (insn) == pc_rtx)))
3685 {
d4ee4d25 3686 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3687 debug_rtx (insn);
3688 }
3689#endif
750c9258 3690
23b2ce53
RS
3691 return insn;
3692}
3693
b5b8b0ac
AO
3694/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3695
e4da1e17 3696static rtx
b5b8b0ac
AO
3697make_debug_insn_raw (rtx pattern)
3698{
3699 rtx insn;
3700
3701 insn = rtx_alloc (DEBUG_INSN);
3702 INSN_UID (insn) = cur_debug_insn_uid++;
3703 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3704 INSN_UID (insn) = cur_insn_uid++;
3705
3706 PATTERN (insn) = pattern;
3707 INSN_CODE (insn) = -1;
3708 REG_NOTES (insn) = NULL;
5368224f 3709 INSN_LOCATION (insn) = curr_insn_location ();
b5b8b0ac
AO
3710 BLOCK_FOR_INSN (insn) = NULL;
3711
3712 return insn;
3713}
3714
2f937369 3715/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3716
e4da1e17 3717static rtx
502b8322 3718make_jump_insn_raw (rtx pattern)
23b2ce53 3719{
b3694847 3720 rtx insn;
23b2ce53 3721
4b1f5e8c 3722 insn = rtx_alloc (JUMP_INSN);
1632afca 3723 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3724
3725 PATTERN (insn) = pattern;
3726 INSN_CODE (insn) = -1;
1632afca
RS
3727 REG_NOTES (insn) = NULL;
3728 JUMP_LABEL (insn) = NULL;
5368224f 3729 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3730 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3731
3732 return insn;
3733}
aff507f4 3734
2f937369 3735/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3736
3737static rtx
502b8322 3738make_call_insn_raw (rtx pattern)
aff507f4 3739{
b3694847 3740 rtx insn;
aff507f4
RK
3741
3742 insn = rtx_alloc (CALL_INSN);
3743 INSN_UID (insn) = cur_insn_uid++;
3744
3745 PATTERN (insn) = pattern;
3746 INSN_CODE (insn) = -1;
aff507f4
RK
3747 REG_NOTES (insn) = NULL;
3748 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5368224f 3749 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3750 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3751
3752 return insn;
3753}
23b2ce53
RS
3754\f
3755/* Add INSN to the end of the doubly-linked list.
3756 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3757
3758void
502b8322 3759add_insn (rtx insn)
23b2ce53 3760{
5936d944 3761 PREV_INSN (insn) = get_last_insn();
23b2ce53
RS
3762 NEXT_INSN (insn) = 0;
3763
5936d944
JH
3764 if (NULL != get_last_insn())
3765 NEXT_INSN (get_last_insn ()) = insn;
23b2ce53 3766
5936d944
JH
3767 if (NULL == get_insns ())
3768 set_first_insn (insn);
23b2ce53 3769
5936d944 3770 set_last_insn (insn);
23b2ce53
RS
3771}
3772
a0ae8e8d
RK
3773/* Add INSN into the doubly-linked list after insn AFTER. This and
3774 the next should be the only functions called to insert an insn once
ba213285 3775 delay slots have been filled since only they know how to update a
a0ae8e8d 3776 SEQUENCE. */
23b2ce53
RS
3777
3778void
6fb5fa3c 3779add_insn_after (rtx insn, rtx after, basic_block bb)
23b2ce53
RS
3780{
3781 rtx next = NEXT_INSN (after);
3782
5b0264cb 3783 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3784
23b2ce53
RS
3785 NEXT_INSN (insn) = next;
3786 PREV_INSN (insn) = after;
3787
3788 if (next)
3789 {
3790 PREV_INSN (next) = insn;
4b4bf941 3791 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3792 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3793 }
5936d944
JH
3794 else if (get_last_insn () == after)
3795 set_last_insn (insn);
23b2ce53
RS
3796 else
3797 {
49ad7cfa 3798 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3799 /* Scan all pending sequences too. */
3800 for (; stack; stack = stack->next)
3801 if (after == stack->last)
fef0509b
RK
3802 {
3803 stack->last = insn;
3804 break;
3805 }
a0ae8e8d 3806
5b0264cb 3807 gcc_assert (stack);
23b2ce53
RS
3808 }
3809
4b4bf941
JQ
3810 if (!BARRIER_P (after)
3811 && !BARRIER_P (insn)
3c030e88
JH
3812 && (bb = BLOCK_FOR_INSN (after)))
3813 {
3814 set_block_for_insn (insn, bb);
38c1593d 3815 if (INSN_P (insn))
6fb5fa3c 3816 df_insn_rescan (insn);
3c030e88 3817 /* Should not happen as first in the BB is always
a1f300c0 3818 either NOTE or LABEL. */
a813c111 3819 if (BB_END (bb) == after
3c030e88 3820 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 3821 && !BARRIER_P (insn)
a38e7aa5 3822 && !NOTE_INSN_BASIC_BLOCK_P (insn))
a813c111 3823 BB_END (bb) = insn;
3c030e88
JH
3824 }
3825
23b2ce53 3826 NEXT_INSN (after) = insn;
4b4bf941 3827 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3828 {
3829 rtx sequence = PATTERN (after);
3830 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3831 }
3832}
3833
a0ae8e8d 3834/* Add INSN into the doubly-linked list before insn BEFORE. This and
6fb5fa3c
DB
3835 the previous should be the only functions called to insert an insn
3836 once delay slots have been filled since only they know how to
3837 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3838 bb from before. */
a0ae8e8d
RK
3839
3840void
6fb5fa3c 3841add_insn_before (rtx insn, rtx before, basic_block bb)
a0ae8e8d
RK
3842{
3843 rtx prev = PREV_INSN (before);
3844
5b0264cb 3845 gcc_assert (!optimize || !INSN_DELETED_P (before));
ba213285 3846
a0ae8e8d
RK
3847 PREV_INSN (insn) = prev;
3848 NEXT_INSN (insn) = before;
3849
3850 if (prev)
3851 {
3852 NEXT_INSN (prev) = insn;
4b4bf941 3853 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
a0ae8e8d
RK
3854 {
3855 rtx sequence = PATTERN (prev);
3856 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3857 }
3858 }
5936d944
JH
3859 else if (get_insns () == before)
3860 set_first_insn (insn);
a0ae8e8d
RK
3861 else
3862 {
49ad7cfa 3863 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3864 /* Scan all pending sequences too. */
3865 for (; stack; stack = stack->next)
3866 if (before == stack->first)
fef0509b
RK
3867 {
3868 stack->first = insn;
3869 break;
3870 }
a0ae8e8d 3871
5b0264cb 3872 gcc_assert (stack);
a0ae8e8d
RK
3873 }
3874
b8698a0f 3875 if (!bb
6fb5fa3c
DB
3876 && !BARRIER_P (before)
3877 && !BARRIER_P (insn))
3878 bb = BLOCK_FOR_INSN (before);
3879
3880 if (bb)
3c030e88
JH
3881 {
3882 set_block_for_insn (insn, bb);
38c1593d 3883 if (INSN_P (insn))
6fb5fa3c 3884 df_insn_rescan (insn);
5b0264cb 3885 /* Should not happen as first in the BB is always either NOTE or
43e05e45 3886 LABEL. */
5b0264cb
NS
3887 gcc_assert (BB_HEAD (bb) != insn
3888 /* Avoid clobbering of structure when creating new BB. */
3889 || BARRIER_P (insn)
a38e7aa5 3890 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88
JH
3891 }
3892
a0ae8e8d 3893 PREV_INSN (before) = insn;
4b4bf941 3894 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
a0ae8e8d
RK
3895 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3896}
3897
6fb5fa3c
DB
3898
3899/* Replace insn with an deleted instruction note. */
3900
0ce2b299
EB
3901void
3902set_insn_deleted (rtx insn)
6fb5fa3c 3903{
39718607 3904 if (INSN_P (insn))
80eb8028 3905 df_insn_delete (insn);
6fb5fa3c
DB
3906 PUT_CODE (insn, NOTE);
3907 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3908}
3909
3910
89e99eea
DB
3911/* Remove an insn from its doubly-linked list. This function knows how
3912 to handle sequences. */
3913void
502b8322 3914remove_insn (rtx insn)
89e99eea
DB
3915{
3916 rtx next = NEXT_INSN (insn);
3917 rtx prev = PREV_INSN (insn);
53c17031
JH
3918 basic_block bb;
3919
89e99eea
DB
3920 if (prev)
3921 {
3922 NEXT_INSN (prev) = next;
4b4bf941 3923 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea
DB
3924 {
3925 rtx sequence = PATTERN (prev);
3926 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3927 }
3928 }
5936d944
JH
3929 else if (get_insns () == insn)
3930 {
fb9ef4c1
JH
3931 if (next)
3932 PREV_INSN (next) = NULL;
5936d944
JH
3933 set_first_insn (next);
3934 }
89e99eea
DB
3935 else
3936 {
49ad7cfa 3937 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3938 /* Scan all pending sequences too. */
3939 for (; stack; stack = stack->next)
3940 if (insn == stack->first)
3941 {
3942 stack->first = next;
3943 break;
3944 }
3945
5b0264cb 3946 gcc_assert (stack);
89e99eea
DB
3947 }
3948
3949 if (next)
3950 {
3951 PREV_INSN (next) = prev;
4b4bf941 3952 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
89e99eea
DB
3953 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3954 }
5936d944
JH
3955 else if (get_last_insn () == insn)
3956 set_last_insn (prev);
89e99eea
DB
3957 else
3958 {
49ad7cfa 3959 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3960 /* Scan all pending sequences too. */
3961 for (; stack; stack = stack->next)
3962 if (insn == stack->last)
3963 {
3964 stack->last = prev;
3965 break;
3966 }
3967
5b0264cb 3968 gcc_assert (stack);
89e99eea 3969 }
80eb8028
SB
3970
3971 /* Invalidate data flow information associated with INSN. */
39718607 3972 if (INSN_P (insn))
80eb8028
SB
3973 df_insn_delete (insn);
3974
3975 /* Fix up basic block boundaries, if necessary. */
4b4bf941 3976 if (!BARRIER_P (insn)
53c17031
JH
3977 && (bb = BLOCK_FOR_INSN (insn)))
3978 {
a813c111 3979 if (BB_HEAD (bb) == insn)
53c17031 3980 {
3bf1e984
RK
3981 /* Never ever delete the basic block note without deleting whole
3982 basic block. */
5b0264cb 3983 gcc_assert (!NOTE_P (insn));
a813c111 3984 BB_HEAD (bb) = next;
53c17031 3985 }
a813c111
SB
3986 if (BB_END (bb) == insn)
3987 BB_END (bb) = prev;
53c17031 3988 }
89e99eea
DB
3989}
3990
ee960939
OH
3991/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3992
3993void
502b8322 3994add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3995{
5b0264cb 3996 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3997
3998 /* Put the register usage information on the CALL. If there is already
3999 some usage information, put ours at the end. */
4000 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4001 {
4002 rtx link;
4003
4004 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4005 link = XEXP (link, 1))
4006 ;
4007
4008 XEXP (link, 1) = call_fusage;
4009 }
4010 else
4011 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4012}
4013
23b2ce53
RS
4014/* Delete all insns made since FROM.
4015 FROM becomes the new last instruction. */
4016
4017void
502b8322 4018delete_insns_since (rtx from)
23b2ce53
RS
4019{
4020 if (from == 0)
5936d944 4021 set_first_insn (0);
23b2ce53
RS
4022 else
4023 NEXT_INSN (from) = 0;
5936d944 4024 set_last_insn (from);
23b2ce53
RS
4025}
4026
5dab5552
MS
4027/* This function is deprecated, please use sequences instead.
4028
4029 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4030 The insns to be moved are those between FROM and TO.
4031 They are moved to a new position after the insn AFTER.
4032 AFTER must not be FROM or TO or any insn in between.
4033
4034 This function does not know about SEQUENCEs and hence should not be
4035 called after delay-slot filling has been done. */
4036
4037void
502b8322 4038reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53 4039{
4f8344eb
HPN
4040#ifdef ENABLE_CHECKING
4041 rtx x;
4042 for (x = from; x != to; x = NEXT_INSN (x))
4043 gcc_assert (after != x);
4044 gcc_assert (after != to);
4045#endif
4046
23b2ce53
RS
4047 /* Splice this bunch out of where it is now. */
4048 if (PREV_INSN (from))
4049 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4050 if (NEXT_INSN (to))
4051 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4052 if (get_last_insn () == to)
4053 set_last_insn (PREV_INSN (from));
4054 if (get_insns () == from)
4055 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4056
4057 /* Make the new neighbors point to it and it to them. */
4058 if (NEXT_INSN (after))
4059 PREV_INSN (NEXT_INSN (after)) = to;
4060
4061 NEXT_INSN (to) = NEXT_INSN (after);
4062 PREV_INSN (from) = after;
4063 NEXT_INSN (after) = from;
5936d944
JH
4064 if (after == get_last_insn())
4065 set_last_insn (to);
23b2ce53
RS
4066}
4067
3c030e88
JH
4068/* Same as function above, but take care to update BB boundaries. */
4069void
502b8322 4070reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
4071{
4072 rtx prev = PREV_INSN (from);
4073 basic_block bb, bb2;
4074
4075 reorder_insns_nobb (from, to, after);
4076
4b4bf941 4077 if (!BARRIER_P (after)
3c030e88
JH
4078 && (bb = BLOCK_FOR_INSN (after)))
4079 {
4080 rtx x;
6fb5fa3c 4081 df_set_bb_dirty (bb);
68252e27 4082
4b4bf941 4083 if (!BARRIER_P (from)
3c030e88
JH
4084 && (bb2 = BLOCK_FOR_INSN (from)))
4085 {
a813c111
SB
4086 if (BB_END (bb2) == to)
4087 BB_END (bb2) = prev;
6fb5fa3c 4088 df_set_bb_dirty (bb2);
3c030e88
JH
4089 }
4090
a813c111
SB
4091 if (BB_END (bb) == after)
4092 BB_END (bb) = to;
3c030e88
JH
4093
4094 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4095 if (!BARRIER_P (x))
63642d5a 4096 df_insn_change_bb (x, bb);
3c030e88
JH
4097 }
4098}
4099
23b2ce53 4100\f
2f937369
DM
4101/* Emit insn(s) of given code and pattern
4102 at a specified place within the doubly-linked list.
23b2ce53 4103
2f937369
DM
4104 All of the emit_foo global entry points accept an object
4105 X which is either an insn list or a PATTERN of a single
4106 instruction.
23b2ce53 4107
2f937369
DM
4108 There are thus a few canonical ways to generate code and
4109 emit it at a specific place in the instruction stream. For
4110 example, consider the instruction named SPOT and the fact that
4111 we would like to emit some instructions before SPOT. We might
4112 do it like this:
23b2ce53 4113
2f937369
DM
4114 start_sequence ();
4115 ... emit the new instructions ...
4116 insns_head = get_insns ();
4117 end_sequence ();
23b2ce53 4118
2f937369 4119 emit_insn_before (insns_head, SPOT);
23b2ce53 4120
2f937369
DM
4121 It used to be common to generate SEQUENCE rtl instead, but that
4122 is a relic of the past which no longer occurs. The reason is that
4123 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4124 generated would almost certainly die right after it was created. */
23b2ce53 4125
5f02387d
NF
4126static rtx
4127emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4128 rtx (*make_raw) (rtx))
23b2ce53 4129{
b3694847 4130 rtx insn;
23b2ce53 4131
5b0264cb 4132 gcc_assert (before);
2f937369
DM
4133
4134 if (x == NULL_RTX)
4135 return last;
4136
4137 switch (GET_CODE (x))
23b2ce53 4138 {
b5b8b0ac 4139 case DEBUG_INSN:
2f937369
DM
4140 case INSN:
4141 case JUMP_INSN:
4142 case CALL_INSN:
4143 case CODE_LABEL:
4144 case BARRIER:
4145 case NOTE:
4146 insn = x;
4147 while (insn)
4148 {
4149 rtx next = NEXT_INSN (insn);
6fb5fa3c 4150 add_insn_before (insn, before, bb);
2f937369
DM
4151 last = insn;
4152 insn = next;
4153 }
4154 break;
4155
4156#ifdef ENABLE_RTL_CHECKING
4157 case SEQUENCE:
5b0264cb 4158 gcc_unreachable ();
2f937369
DM
4159 break;
4160#endif
4161
4162 default:
5f02387d 4163 last = (*make_raw) (x);
6fb5fa3c 4164 add_insn_before (last, before, bb);
2f937369 4165 break;
23b2ce53
RS
4166 }
4167
2f937369 4168 return last;
23b2ce53
RS
4169}
4170
5f02387d
NF
4171/* Make X be output before the instruction BEFORE. */
4172
4173rtx
4174emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4175{
4176 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4177}
4178
2f937369 4179/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4180 and output it before the instruction BEFORE. */
4181
4182rtx
a7102479 4183emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 4184{
5f02387d
NF
4185 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4186 make_jump_insn_raw);
23b2ce53
RS
4187}
4188
2f937369 4189/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4190 and output it before the instruction BEFORE. */
4191
4192rtx
a7102479 4193emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4194{
5f02387d
NF
4195 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4196 make_call_insn_raw);
969d70ca
JH
4197}
4198
b5b8b0ac
AO
4199/* Make an instruction with body X and code DEBUG_INSN
4200 and output it before the instruction BEFORE. */
4201
4202rtx
4203emit_debug_insn_before_noloc (rtx x, rtx before)
4204{
5f02387d
NF
4205 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4206 make_debug_insn_raw);
b5b8b0ac
AO
4207}
4208
23b2ce53 4209/* Make an insn of code BARRIER
e881bb1b 4210 and output it before the insn BEFORE. */
23b2ce53
RS
4211
4212rtx
502b8322 4213emit_barrier_before (rtx before)
23b2ce53 4214{
b3694847 4215 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4216
4217 INSN_UID (insn) = cur_insn_uid++;
4218
6fb5fa3c 4219 add_insn_before (insn, before, NULL);
23b2ce53
RS
4220 return insn;
4221}
4222
e881bb1b
RH
4223/* Emit the label LABEL before the insn BEFORE. */
4224
4225rtx
502b8322 4226emit_label_before (rtx label, rtx before)
e881bb1b 4227{
468660d3
SB
4228 gcc_checking_assert (INSN_UID (label) == 0);
4229 INSN_UID (label) = cur_insn_uid++;
4230 add_insn_before (label, before, NULL);
e881bb1b
RH
4231 return label;
4232}
4233
23b2ce53
RS
4234/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4235
4236rtx
a38e7aa5 4237emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4238{
b3694847 4239 rtx note = rtx_alloc (NOTE);
23b2ce53 4240 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4241 NOTE_KIND (note) = subtype;
ba4f7968 4242 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4243 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4244
6fb5fa3c 4245 add_insn_before (note, before, NULL);
23b2ce53
RS
4246 return note;
4247}
4248\f
2f937369
DM
4249/* Helper for emit_insn_after, handles lists of instructions
4250 efficiently. */
23b2ce53 4251
2f937369 4252static rtx
6fb5fa3c 4253emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4254{
2f937369
DM
4255 rtx last;
4256 rtx after_after;
6fb5fa3c
DB
4257 if (!bb && !BARRIER_P (after))
4258 bb = BLOCK_FOR_INSN (after);
23b2ce53 4259
6fb5fa3c 4260 if (bb)
23b2ce53 4261 {
6fb5fa3c 4262 df_set_bb_dirty (bb);
2f937369 4263 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4264 if (!BARRIER_P (last))
6fb5fa3c
DB
4265 {
4266 set_block_for_insn (last, bb);
4267 df_insn_rescan (last);
4268 }
4b4bf941 4269 if (!BARRIER_P (last))
6fb5fa3c
DB
4270 {
4271 set_block_for_insn (last, bb);
4272 df_insn_rescan (last);
4273 }
a813c111
SB
4274 if (BB_END (bb) == after)
4275 BB_END (bb) = last;
23b2ce53
RS
4276 }
4277 else
2f937369
DM
4278 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4279 continue;
4280
4281 after_after = NEXT_INSN (after);
4282
4283 NEXT_INSN (after) = first;
4284 PREV_INSN (first) = after;
4285 NEXT_INSN (last) = after_after;
4286 if (after_after)
4287 PREV_INSN (after_after) = last;
4288
5936d944
JH
4289 if (after == get_last_insn())
4290 set_last_insn (last);
e855c69d 4291
2f937369
DM
4292 return last;
4293}
4294
5f02387d
NF
4295static rtx
4296emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4297 rtx (*make_raw)(rtx))
2f937369
DM
4298{
4299 rtx last = after;
4300
5b0264cb 4301 gcc_assert (after);
2f937369
DM
4302
4303 if (x == NULL_RTX)
4304 return last;
4305
4306 switch (GET_CODE (x))
23b2ce53 4307 {
b5b8b0ac 4308 case DEBUG_INSN:
2f937369
DM
4309 case INSN:
4310 case JUMP_INSN:
4311 case CALL_INSN:
4312 case CODE_LABEL:
4313 case BARRIER:
4314 case NOTE:
6fb5fa3c 4315 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4316 break;
4317
4318#ifdef ENABLE_RTL_CHECKING
4319 case SEQUENCE:
5b0264cb 4320 gcc_unreachable ();
2f937369
DM
4321 break;
4322#endif
4323
4324 default:
5f02387d 4325 last = (*make_raw) (x);
6fb5fa3c 4326 add_insn_after (last, after, bb);
2f937369 4327 break;
23b2ce53
RS
4328 }
4329
2f937369 4330 return last;
23b2ce53
RS
4331}
4332
5f02387d
NF
4333/* Make X be output after the insn AFTER and set the BB of insn. If
4334 BB is NULL, an attempt is made to infer the BB from AFTER. */
4335
4336rtx
4337emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4338{
4339 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4340}
4341
255680cf 4342
2f937369 4343/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4344 and output it after the insn AFTER. */
4345
4346rtx
a7102479 4347emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4348{
5f02387d 4349 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
2f937369
DM
4350}
4351
4352/* Make an instruction with body X and code CALL_INSN
4353 and output it after the instruction AFTER. */
4354
4355rtx
a7102479 4356emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4357{
5f02387d 4358 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4359}
4360
b5b8b0ac
AO
4361/* Make an instruction with body X and code CALL_INSN
4362 and output it after the instruction AFTER. */
4363
4364rtx
4365emit_debug_insn_after_noloc (rtx x, rtx after)
4366{
5f02387d 4367 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4368}
4369
23b2ce53
RS
4370/* Make an insn of code BARRIER
4371 and output it after the insn AFTER. */
4372
4373rtx
502b8322 4374emit_barrier_after (rtx after)
23b2ce53 4375{
b3694847 4376 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4377
4378 INSN_UID (insn) = cur_insn_uid++;
4379
6fb5fa3c 4380 add_insn_after (insn, after, NULL);
23b2ce53
RS
4381 return insn;
4382}
4383
4384/* Emit the label LABEL after the insn AFTER. */
4385
4386rtx
502b8322 4387emit_label_after (rtx label, rtx after)
23b2ce53 4388{
468660d3
SB
4389 gcc_checking_assert (INSN_UID (label) == 0);
4390 INSN_UID (label) = cur_insn_uid++;
4391 add_insn_after (label, after, NULL);
23b2ce53
RS
4392 return label;
4393}
4394
4395/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4396
4397rtx
a38e7aa5 4398emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4399{
b3694847 4400 rtx note = rtx_alloc (NOTE);
23b2ce53 4401 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4402 NOTE_KIND (note) = subtype;
ba4f7968 4403 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4404 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4405 add_insn_after (note, after, NULL);
23b2ce53
RS
4406 return note;
4407}
23b2ce53 4408\f
e8110d6f
NF
4409/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4410 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4411
4412static rtx
4413emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4414 rtx (*make_raw) (rtx))
0d682900 4415{
e8110d6f 4416 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4417
a7102479 4418 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4419 return last;
4420
2f937369
DM
4421 after = NEXT_INSN (after);
4422 while (1)
4423 {
5368224f
DC
4424 if (active_insn_p (after) && !INSN_LOCATION (after))
4425 INSN_LOCATION (after) = loc;
2f937369
DM
4426 if (after == last)
4427 break;
4428 after = NEXT_INSN (after);
4429 }
0d682900
JH
4430 return last;
4431}
4432
e8110d6f
NF
4433/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4434 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4435 any DEBUG_INSNs. */
4436
4437static rtx
4438emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4439 rtx (*make_raw) (rtx))
a7102479 4440{
b5b8b0ac
AO
4441 rtx prev = after;
4442
e8110d6f
NF
4443 if (skip_debug_insns)
4444 while (DEBUG_INSN_P (prev))
4445 prev = PREV_INSN (prev);
b5b8b0ac
AO
4446
4447 if (INSN_P (prev))
5368224f 4448 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
e8110d6f 4449 make_raw);
a7102479 4450 else
e8110d6f 4451 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4452}
4453
5368224f 4454/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
0d682900 4455rtx
e8110d6f 4456emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4457{
e8110d6f
NF
4458 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4459}
2f937369 4460
5368224f 4461/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
e8110d6f
NF
4462rtx
4463emit_insn_after (rtx pattern, rtx after)
4464{
4465 return emit_pattern_after (pattern, after, true, make_insn_raw);
4466}
dd3adcf8 4467
5368224f 4468/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
e8110d6f
NF
4469rtx
4470emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4471{
4472 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4473}
4474
5368224f 4475/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
a7102479
JH
4476rtx
4477emit_jump_insn_after (rtx pattern, rtx after)
4478{
e8110d6f 4479 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4480}
4481
5368224f 4482/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
0d682900 4483rtx
502b8322 4484emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4485{
e8110d6f 4486 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4487}
4488
5368224f 4489/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
a7102479
JH
4490rtx
4491emit_call_insn_after (rtx pattern, rtx after)
4492{
e8110d6f 4493 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4494}
4495
5368224f 4496/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
b5b8b0ac
AO
4497rtx
4498emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4499{
e8110d6f 4500 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4501}
4502
5368224f 4503/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
b5b8b0ac
AO
4504rtx
4505emit_debug_insn_after (rtx pattern, rtx after)
4506{
e8110d6f 4507 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4508}
4509
e8110d6f
NF
4510/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4511 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4512 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4513 CALL_INSN, etc. */
4514
4515static rtx
4516emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4517 rtx (*make_raw) (rtx))
0d682900
JH
4518{
4519 rtx first = PREV_INSN (before);
e8110d6f
NF
4520 rtx last = emit_pattern_before_noloc (pattern, before,
4521 insnp ? before : NULL_RTX,
4522 NULL, make_raw);
a7102479
JH
4523
4524 if (pattern == NULL_RTX || !loc)
4525 return last;
4526
26cb3993
JH
4527 if (!first)
4528 first = get_insns ();
4529 else
4530 first = NEXT_INSN (first);
a7102479
JH
4531 while (1)
4532 {
5368224f
DC
4533 if (active_insn_p (first) && !INSN_LOCATION (first))
4534 INSN_LOCATION (first) = loc;
a7102479
JH
4535 if (first == last)
4536 break;
4537 first = NEXT_INSN (first);
4538 }
4539 return last;
4540}
4541
e8110d6f
NF
4542/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4543 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4544 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4545 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4546
4547static rtx
4548emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4549 bool insnp, rtx (*make_raw) (rtx))
a7102479 4550{
b5b8b0ac
AO
4551 rtx next = before;
4552
e8110d6f
NF
4553 if (skip_debug_insns)
4554 while (DEBUG_INSN_P (next))
4555 next = PREV_INSN (next);
b5b8b0ac
AO
4556
4557 if (INSN_P (next))
5368224f 4558 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
e8110d6f 4559 insnp, make_raw);
a7102479 4560 else
e8110d6f
NF
4561 return emit_pattern_before_noloc (pattern, before,
4562 insnp ? before : NULL_RTX,
4563 NULL, make_raw);
a7102479
JH
4564}
4565
5368224f 4566/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
a7102479 4567rtx
e8110d6f 4568emit_insn_before_setloc (rtx pattern, rtx before, int loc)
a7102479 4569{
e8110d6f
NF
4570 return emit_pattern_before_setloc (pattern, before, loc, true,
4571 make_insn_raw);
4572}
a7102479 4573
5368224f 4574/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
e8110d6f
NF
4575rtx
4576emit_insn_before (rtx pattern, rtx before)
4577{
4578 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4579}
a7102479 4580
5368224f 4581/* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
e8110d6f
NF
4582rtx
4583emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4584{
4585 return emit_pattern_before_setloc (pattern, before, loc, false,
4586 make_jump_insn_raw);
a7102479
JH
4587}
4588
5368224f 4589/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
a7102479
JH
4590rtx
4591emit_jump_insn_before (rtx pattern, rtx before)
4592{
e8110d6f
NF
4593 return emit_pattern_before (pattern, before, true, false,
4594 make_jump_insn_raw);
a7102479
JH
4595}
4596
5368224f 4597/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
a7102479
JH
4598rtx
4599emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4600{
e8110d6f
NF
4601 return emit_pattern_before_setloc (pattern, before, loc, false,
4602 make_call_insn_raw);
0d682900 4603}
a7102479 4604
e8110d6f 4605/* Like emit_call_insn_before_noloc,
5368224f 4606 but set insn_location according to BEFORE. */
a7102479
JH
4607rtx
4608emit_call_insn_before (rtx pattern, rtx before)
4609{
e8110d6f
NF
4610 return emit_pattern_before (pattern, before, true, false,
4611 make_call_insn_raw);
a7102479 4612}
b5b8b0ac 4613
5368224f 4614/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
b5b8b0ac
AO
4615rtx
4616emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4617{
e8110d6f
NF
4618 return emit_pattern_before_setloc (pattern, before, loc, false,
4619 make_debug_insn_raw);
b5b8b0ac
AO
4620}
4621
e8110d6f 4622/* Like emit_debug_insn_before_noloc,
5368224f 4623 but set insn_location according to BEFORE. */
b5b8b0ac
AO
4624rtx
4625emit_debug_insn_before (rtx pattern, rtx before)
4626{
e8110d6f
NF
4627 return emit_pattern_before (pattern, before, false, false,
4628 make_debug_insn_raw);
b5b8b0ac 4629}
0d682900 4630\f
2f937369
DM
4631/* Take X and emit it at the end of the doubly-linked
4632 INSN list.
23b2ce53
RS
4633
4634 Returns the last insn emitted. */
4635
4636rtx
502b8322 4637emit_insn (rtx x)
23b2ce53 4638{
5936d944 4639 rtx last = get_last_insn();
2f937369 4640 rtx insn;
23b2ce53 4641
2f937369
DM
4642 if (x == NULL_RTX)
4643 return last;
23b2ce53 4644
2f937369
DM
4645 switch (GET_CODE (x))
4646 {
b5b8b0ac 4647 case DEBUG_INSN:
2f937369
DM
4648 case INSN:
4649 case JUMP_INSN:
4650 case CALL_INSN:
4651 case CODE_LABEL:
4652 case BARRIER:
4653 case NOTE:
4654 insn = x;
4655 while (insn)
23b2ce53 4656 {
2f937369 4657 rtx next = NEXT_INSN (insn);
23b2ce53 4658 add_insn (insn);
2f937369
DM
4659 last = insn;
4660 insn = next;
23b2ce53 4661 }
2f937369 4662 break;
23b2ce53 4663
2f937369 4664#ifdef ENABLE_RTL_CHECKING
39718607 4665 case JUMP_TABLE_DATA:
2f937369 4666 case SEQUENCE:
5b0264cb 4667 gcc_unreachable ();
2f937369
DM
4668 break;
4669#endif
23b2ce53 4670
2f937369
DM
4671 default:
4672 last = make_insn_raw (x);
4673 add_insn (last);
4674 break;
23b2ce53
RS
4675 }
4676
4677 return last;
4678}
4679
b5b8b0ac
AO
4680/* Make an insn of code DEBUG_INSN with pattern X
4681 and add it to the end of the doubly-linked list. */
4682
4683rtx
4684emit_debug_insn (rtx x)
4685{
5936d944 4686 rtx last = get_last_insn();
b5b8b0ac
AO
4687 rtx insn;
4688
4689 if (x == NULL_RTX)
4690 return last;
4691
4692 switch (GET_CODE (x))
4693 {
4694 case DEBUG_INSN:
4695 case INSN:
4696 case JUMP_INSN:
4697 case CALL_INSN:
4698 case CODE_LABEL:
4699 case BARRIER:
4700 case NOTE:
4701 insn = x;
4702 while (insn)
4703 {
4704 rtx next = NEXT_INSN (insn);
4705 add_insn (insn);
4706 last = insn;
4707 insn = next;
4708 }
4709 break;
4710
4711#ifdef ENABLE_RTL_CHECKING
39718607 4712 case JUMP_TABLE_DATA:
b5b8b0ac
AO
4713 case SEQUENCE:
4714 gcc_unreachable ();
4715 break;
4716#endif
4717
4718 default:
4719 last = make_debug_insn_raw (x);
4720 add_insn (last);
4721 break;
4722 }
4723
4724 return last;
4725}
4726
2f937369
DM
4727/* Make an insn of code JUMP_INSN with pattern X
4728 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4729
4730rtx
502b8322 4731emit_jump_insn (rtx x)
23b2ce53 4732{
d950dee3 4733 rtx last = NULL_RTX, insn;
23b2ce53 4734
2f937369 4735 switch (GET_CODE (x))
23b2ce53 4736 {
b5b8b0ac 4737 case DEBUG_INSN:
2f937369
DM
4738 case INSN:
4739 case JUMP_INSN:
4740 case CALL_INSN:
4741 case CODE_LABEL:
4742 case BARRIER:
4743 case NOTE:
4744 insn = x;
4745 while (insn)
4746 {
4747 rtx next = NEXT_INSN (insn);
4748 add_insn (insn);
4749 last = insn;
4750 insn = next;
4751 }
4752 break;
e0a5c5eb 4753
2f937369 4754#ifdef ENABLE_RTL_CHECKING
39718607 4755 case JUMP_TABLE_DATA:
2f937369 4756 case SEQUENCE:
5b0264cb 4757 gcc_unreachable ();
2f937369
DM
4758 break;
4759#endif
e0a5c5eb 4760
2f937369
DM
4761 default:
4762 last = make_jump_insn_raw (x);
4763 add_insn (last);
4764 break;
3c030e88 4765 }
e0a5c5eb
RS
4766
4767 return last;
4768}
4769
2f937369 4770/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4771 and add it to the end of the doubly-linked list. */
4772
4773rtx
502b8322 4774emit_call_insn (rtx x)
23b2ce53 4775{
2f937369
DM
4776 rtx insn;
4777
4778 switch (GET_CODE (x))
23b2ce53 4779 {
b5b8b0ac 4780 case DEBUG_INSN:
2f937369
DM
4781 case INSN:
4782 case JUMP_INSN:
4783 case CALL_INSN:
4784 case CODE_LABEL:
4785 case BARRIER:
4786 case NOTE:
4787 insn = emit_insn (x);
4788 break;
23b2ce53 4789
2f937369
DM
4790#ifdef ENABLE_RTL_CHECKING
4791 case SEQUENCE:
39718607 4792 case JUMP_TABLE_DATA:
5b0264cb 4793 gcc_unreachable ();
2f937369
DM
4794 break;
4795#endif
23b2ce53 4796
2f937369
DM
4797 default:
4798 insn = make_call_insn_raw (x);
23b2ce53 4799 add_insn (insn);
2f937369 4800 break;
23b2ce53 4801 }
2f937369
DM
4802
4803 return insn;
23b2ce53
RS
4804}
4805
4806/* Add the label LABEL to the end of the doubly-linked list. */
4807
4808rtx
502b8322 4809emit_label (rtx label)
23b2ce53 4810{
468660d3
SB
4811 gcc_checking_assert (INSN_UID (label) == 0);
4812 INSN_UID (label) = cur_insn_uid++;
4813 add_insn (label);
23b2ce53
RS
4814 return label;
4815}
4816
39718607
SB
4817/* Make an insn of code JUMP_TABLE_DATA
4818 and add it to the end of the doubly-linked list. */
4819
4820rtx
4821emit_jump_table_data (rtx table)
4822{
4823 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4824 INSN_UID (jump_table_data) = cur_insn_uid++;
4825 PATTERN (jump_table_data) = table;
4826 BLOCK_FOR_INSN (jump_table_data) = NULL;
4827 add_insn (jump_table_data);
4828 return jump_table_data;
4829}
4830
23b2ce53
RS
4831/* Make an insn of code BARRIER
4832 and add it to the end of the doubly-linked list. */
4833
4834rtx
502b8322 4835emit_barrier (void)
23b2ce53 4836{
b3694847 4837 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4838 INSN_UID (barrier) = cur_insn_uid++;
4839 add_insn (barrier);
4840 return barrier;
4841}
4842
5f2fc772 4843/* Emit a copy of note ORIG. */
502b8322 4844
5f2fc772
NS
4845rtx
4846emit_note_copy (rtx orig)
4847{
4848 rtx note;
b8698a0f 4849
5f2fc772 4850 note = rtx_alloc (NOTE);
b8698a0f 4851
5f2fc772
NS
4852 INSN_UID (note) = cur_insn_uid++;
4853 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4854 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4855 BLOCK_FOR_INSN (note) = NULL;
4856 add_insn (note);
b8698a0f 4857
2e040219 4858 return note;
23b2ce53
RS
4859}
4860
2e040219
NS
4861/* Make an insn of code NOTE or type NOTE_NO
4862 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4863
4864rtx
a38e7aa5 4865emit_note (enum insn_note kind)
23b2ce53 4866{
b3694847 4867 rtx note;
23b2ce53 4868
23b2ce53
RS
4869 note = rtx_alloc (NOTE);
4870 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4871 NOTE_KIND (note) = kind;
dd107e66 4872 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4873 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4874 add_insn (note);
4875 return note;
4876}
4877
c41c1387
RS
4878/* Emit a clobber of lvalue X. */
4879
4880rtx
4881emit_clobber (rtx x)
4882{
4883 /* CONCATs should not appear in the insn stream. */
4884 if (GET_CODE (x) == CONCAT)
4885 {
4886 emit_clobber (XEXP (x, 0));
4887 return emit_clobber (XEXP (x, 1));
4888 }
4889 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4890}
4891
4892/* Return a sequence of insns to clobber lvalue X. */
4893
4894rtx
4895gen_clobber (rtx x)
4896{
4897 rtx seq;
4898
4899 start_sequence ();
4900 emit_clobber (x);
4901 seq = get_insns ();
4902 end_sequence ();
4903 return seq;
4904}
4905
4906/* Emit a use of rvalue X. */
4907
4908rtx
4909emit_use (rtx x)
4910{
4911 /* CONCATs should not appear in the insn stream. */
4912 if (GET_CODE (x) == CONCAT)
4913 {
4914 emit_use (XEXP (x, 0));
4915 return emit_use (XEXP (x, 1));
4916 }
4917 return emit_insn (gen_rtx_USE (VOIDmode, x));
4918}
4919
4920/* Return a sequence of insns to use rvalue X. */
4921
4922rtx
4923gen_use (rtx x)
4924{
4925 rtx seq;
4926
4927 start_sequence ();
4928 emit_use (x);
4929 seq = get_insns ();
4930 end_sequence ();
4931 return seq;
4932}
4933
87b47c85 4934/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4935 note of this type already exists, remove it first. */
87b47c85 4936
3d238248 4937rtx
502b8322 4938set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4939{
4940 rtx note = find_reg_note (insn, kind, NULL_RTX);
4941
52488da1
JW
4942 switch (kind)
4943 {
4944 case REG_EQUAL:
4945 case REG_EQUIV:
4946 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4947 has multiple sets (some callers assume single_set
4948 means the insn only has one set, when in fact it
4949 means the insn only has one * useful * set). */
4950 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4951 {
5b0264cb 4952 gcc_assert (!note);
52488da1
JW
4953 return NULL_RTX;
4954 }
4955
4956 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4957 It serves no useful purpose and breaks eliminate_regs. */
4958 if (GET_CODE (datum) == ASM_OPERANDS)
4959 return NULL_RTX;
6fb5fa3c
DB
4960
4961 if (note)
4962 {
4963 XEXP (note, 0) = datum;
4964 df_notes_rescan (insn);
4965 return note;
4966 }
52488da1
JW
4967 break;
4968
4969 default:
6fb5fa3c
DB
4970 if (note)
4971 {
4972 XEXP (note, 0) = datum;
4973 return note;
4974 }
52488da1
JW
4975 break;
4976 }
3d238248 4977
65c5f2a6 4978 add_reg_note (insn, kind, datum);
6fb5fa3c
DB
4979
4980 switch (kind)
3d238248 4981 {
6fb5fa3c
DB
4982 case REG_EQUAL:
4983 case REG_EQUIV:
4984 df_notes_rescan (insn);
4985 break;
4986 default:
4987 break;
3d238248 4988 }
87b47c85 4989
3d238248 4990 return REG_NOTES (insn);
87b47c85 4991}
7543f918
JR
4992
4993/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
4994rtx
4995set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
4996{
4997 rtx set = single_set (insn);
4998
4999 if (set && SET_DEST (set) == dst)
5000 return set_unique_reg_note (insn, kind, datum);
5001 return NULL_RTX;
5002}
23b2ce53
RS
5003\f
5004/* Return an indication of which type of insn should have X as a body.
5005 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5006
d78db459 5007static enum rtx_code
502b8322 5008classify_insn (rtx x)
23b2ce53 5009{
4b4bf941 5010 if (LABEL_P (x))
23b2ce53
RS
5011 return CODE_LABEL;
5012 if (GET_CODE (x) == CALL)
5013 return CALL_INSN;
26898771 5014 if (ANY_RETURN_P (x))
23b2ce53
RS
5015 return JUMP_INSN;
5016 if (GET_CODE (x) == SET)
5017 {
5018 if (SET_DEST (x) == pc_rtx)
5019 return JUMP_INSN;
5020 else if (GET_CODE (SET_SRC (x)) == CALL)
5021 return CALL_INSN;
5022 else
5023 return INSN;
5024 }
5025 if (GET_CODE (x) == PARALLEL)
5026 {
b3694847 5027 int j;
23b2ce53
RS
5028 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5029 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5030 return CALL_INSN;
5031 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5032 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5033 return JUMP_INSN;
5034 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5035 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5036 return CALL_INSN;
5037 }
5038 return INSN;
5039}
5040
5041/* Emit the rtl pattern X as an appropriate kind of insn.
5042 If X is a label, it is simply added into the insn chain. */
5043
5044rtx
502b8322 5045emit (rtx x)
23b2ce53
RS
5046{
5047 enum rtx_code code = classify_insn (x);
5048
5b0264cb 5049 switch (code)
23b2ce53 5050 {
5b0264cb
NS
5051 case CODE_LABEL:
5052 return emit_label (x);
5053 case INSN:
5054 return emit_insn (x);
5055 case JUMP_INSN:
5056 {
5057 rtx insn = emit_jump_insn (x);
5058 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5059 return emit_barrier ();
5060 return insn;
5061 }
5062 case CALL_INSN:
5063 return emit_call_insn (x);
b5b8b0ac
AO
5064 case DEBUG_INSN:
5065 return emit_debug_insn (x);
5b0264cb
NS
5066 default:
5067 gcc_unreachable ();
23b2ce53 5068 }
23b2ce53
RS
5069}
5070\f
e2500fed 5071/* Space for free sequence stack entries. */
1431042e 5072static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5073
4dfa0342
RH
5074/* Begin emitting insns to a sequence. If this sequence will contain
5075 something that might cause the compiler to pop arguments to function
5076 calls (because those pops have previously been deferred; see
5077 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5078 before calling this function. That will ensure that the deferred
5079 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5080
5081void
502b8322 5082start_sequence (void)
23b2ce53
RS
5083{
5084 struct sequence_stack *tem;
5085
e2500fed
GK
5086 if (free_sequence_stack != NULL)
5087 {
5088 tem = free_sequence_stack;
5089 free_sequence_stack = tem->next;
5090 }
5091 else
a9429e29 5092 tem = ggc_alloc_sequence_stack ();
23b2ce53 5093
49ad7cfa 5094 tem->next = seq_stack;
5936d944
JH
5095 tem->first = get_insns ();
5096 tem->last = get_last_insn ();
23b2ce53 5097
49ad7cfa 5098 seq_stack = tem;
23b2ce53 5099
5936d944
JH
5100 set_first_insn (0);
5101 set_last_insn (0);
23b2ce53
RS
5102}
5103
5c7a310f
MM
5104/* Set up the insn chain starting with FIRST as the current sequence,
5105 saving the previously current one. See the documentation for
5106 start_sequence for more information about how to use this function. */
23b2ce53
RS
5107
5108void
502b8322 5109push_to_sequence (rtx first)
23b2ce53
RS
5110{
5111 rtx last;
5112
5113 start_sequence ();
5114
e84a58ff
EB
5115 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5116 ;
23b2ce53 5117
5936d944
JH
5118 set_first_insn (first);
5119 set_last_insn (last);
23b2ce53
RS
5120}
5121
bb27eeda
SE
5122/* Like push_to_sequence, but take the last insn as an argument to avoid
5123 looping through the list. */
5124
5125void
5126push_to_sequence2 (rtx first, rtx last)
5127{
5128 start_sequence ();
5129
5936d944
JH
5130 set_first_insn (first);
5131 set_last_insn (last);
bb27eeda
SE
5132}
5133
f15ae3a1
TW
5134/* Set up the outer-level insn chain
5135 as the current sequence, saving the previously current one. */
5136
5137void
502b8322 5138push_topmost_sequence (void)
f15ae3a1 5139{
aefdd5ab 5140 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5141
5142 start_sequence ();
5143
49ad7cfa 5144 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5145 top = stack;
5146
5936d944
JH
5147 set_first_insn (top->first);
5148 set_last_insn (top->last);
f15ae3a1
TW
5149}
5150
5151/* After emitting to the outer-level insn chain, update the outer-level
5152 insn chain, and restore the previous saved state. */
5153
5154void
502b8322 5155pop_topmost_sequence (void)
f15ae3a1 5156{
aefdd5ab 5157 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5158
49ad7cfa 5159 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5160 top = stack;
5161
5936d944
JH
5162 top->first = get_insns ();
5163 top->last = get_last_insn ();
f15ae3a1
TW
5164
5165 end_sequence ();
5166}
5167
23b2ce53
RS
5168/* After emitting to a sequence, restore previous saved state.
5169
5c7a310f 5170 To get the contents of the sequence just made, you must call
2f937369 5171 `get_insns' *before* calling here.
5c7a310f
MM
5172
5173 If the compiler might have deferred popping arguments while
5174 generating this sequence, and this sequence will not be immediately
5175 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5176 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5177 pops are inserted into this sequence, and not into some random
5178 location in the instruction stream. See INHIBIT_DEFER_POP for more
5179 information about deferred popping of arguments. */
23b2ce53
RS
5180
5181void
502b8322 5182end_sequence (void)
23b2ce53 5183{
49ad7cfa 5184 struct sequence_stack *tem = seq_stack;
23b2ce53 5185
5936d944
JH
5186 set_first_insn (tem->first);
5187 set_last_insn (tem->last);
49ad7cfa 5188 seq_stack = tem->next;
23b2ce53 5189
e2500fed
GK
5190 memset (tem, 0, sizeof (*tem));
5191 tem->next = free_sequence_stack;
5192 free_sequence_stack = tem;
23b2ce53
RS
5193}
5194
5195/* Return 1 if currently emitting into a sequence. */
5196
5197int
502b8322 5198in_sequence_p (void)
23b2ce53 5199{
49ad7cfa 5200 return seq_stack != 0;
23b2ce53 5201}
23b2ce53 5202\f
59ec66dc
MM
5203/* Put the various virtual registers into REGNO_REG_RTX. */
5204
2bbdec73 5205static void
bd60bab2 5206init_virtual_regs (void)
59ec66dc 5207{
bd60bab2
JH
5208 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5209 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5210 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5211 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5212 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5213 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5214 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5215}
5216
da43a810
BS
5217\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
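
/* Illustrative usage sketch (editorial example, not part of this file):
   per the comment above copy_insn_1, a caller that has copied a pattern
   with copy_insn can then copy the REG_NOTEs of the original insn with
   copy_insn_1, so that any SCRATCHes stay consistent between the two:

     newpat = copy_insn (PATTERN (insn));
     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
       add_reg_note (newinsn, REG_NOTE_KIND (link),
                     copy_insn_1 (XEXP (link, 0)));

   newpat, newinsn and link are the caller's own locals; see
   emit_copy_of_insn_after near the end of this file for a real caller
   that follows this pattern.  */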

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx
copy_delay_slot_insn (rtx insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = copy_rtx (insn);
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
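
/* Illustrative usage sketch (editorial example, not part of this file):
   because of the check above, building a vector whose elements are all
   the scalar zero simply yields the shared CONST0_RTX for the vector
   mode:

     rtvec v = rtvec_alloc (GET_MODE_NUNITS (V4SImode));
     int i;
     for (i = 0; i < GET_MODE_NUNITS (V4SImode); i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Afterwards x == CONST0_RTX (V4SImode).  V4SImode is used only for
   concreteness, assuming the target supports it.  */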

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
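
/* Illustrative usage sketch (editorial example, not part of this file):
   a pass that duplicates a run of insns, e.g. when peeling code, can
   clone one insn at a time while preserving notes and flags:

     for (insn = first; insn != last; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         after = emit_copy_of_insn_after (insn, after);

   first, last and after are whatever boundaries and emission point the
   caller has chosen.  */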

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
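
/* Illustrative note (editorial, not part of this file): the table above
   memoizes one CLOBBER per (mode, hard register) pair, so repeated
   requests share a single rtx:

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_assert (c1 == c2);

   Register number 0 is chosen purely for illustration.  */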

location_t prologue_location;
location_t epilogue_location;

/* Hold the current and the last location information, so that the data
   structures are built lazily only when instructions at a given location
   are actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}
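
/* Illustrative usage sketch (editorial example, not part of this file):
   expansion code records the location of the statement it is currently
   expanding so that the insns emitted for it inherit that location, in
   the style of cfgexpand.c:

     set_curr_insn_location (gimple_location (stmt));
     ... emit RTL for stmt ...
     loc = curr_insn_location ();

   gimple_location (stmt) assumes the caller is expanding a gimple
   statement; loc is a caller-side local.  */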

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
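
/* Illustrative usage sketch (editorial example, not part of this file):
   a target expander can bracket an atomic operation with barriers,
   where emit_target_barrier stands for a hypothetical target-specific
   barrier emitter:

     if (need_atomic_barrier_p (model, true))
       emit_target_barrier ();       <-- release-style, before the op
     emit_insn (atomic_insn);
     if (need_atomic_barrier_p (model, false))
       emit_target_barrier ();       <-- acquire-style, after the op

   model and atomic_insn are supplied by the caller.  */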
\f
#include "gt-emit-rtl.h"