/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a register with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to keep register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

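/* Return a CONST_INT for C truncated to fit MODE.  As a worked example,
   assuming the usual 8-bit QImode, gen_int_mode (0x1ff, QImode) masks
   the value to 8 bits and sign-extends it, yielding the canonical
   (const_int -1) rather than an out-of-range (const_int 511).  */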
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

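/* For instance, on a host with HOST_BITS_PER_WIDE_INT == 64,
   immed_double_const (-1, -1, VOIDmode) falls into case 2 above and
   collapses to GEN_INT (-1), because I1 is just the sign extension of
   I0; only values that genuinely need two words get a CONST_DOUBLE.  */
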
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than
		that of the integer mode.  LRA also uses subregs for a
		register that should be used in different modes in one
		insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

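/* As an illustration of the subword rules above: for a pseudo on a
   big-endian target with 4-byte words, validate_subreg accepts
   (subreg:QI (reg:SI) 3), since byte 3 is the low-order byte of its
   word there, but rejects it on a little-endian target, where the
   lowpart byte sits at offset 0.  */
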
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

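/* A typical use builds the operand vector of a PARALLEL inline, e.g.
   gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2)) for some
   rtxes set1 and set2; gen_rtvec_v below does the same from an
   array.  */
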
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}

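/* For example, with 4-byte words, byte_lowpart_offset (SImode, DImode)
   is 4 on a big-endian target and 0 on a little-endian one: the
   low-order SImode half of a DImode value sits at the higher address
   when words and bytes are big-endian.  */
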
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

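/* E.g. for a pseudo (reg:DI 100), gen_lowpart_common (SImode, x)
   yields (subreg:SI (reg:DI 100) 0) on a little-endian target (offset
   4 on big-endian), while for (sign_extend:DI (reg:SI 101)) it simply
   returns the inner (reg:SI 101).  */
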
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but the mode of EXP is passed explicitly
   (INNERMODE) in case EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

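/* As a concrete check, with 4-byte words and 8-byte DImode:
   subreg_lowpart_offset (SImode, DImode) is 0 on little-endian and 4
   on big-endian targets, while subreg_highpart_offset (SImode, DImode)
   is the mirror image, 4 and 0 respectively; the two always sum to the
   size difference between the modes.  */
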
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}

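/* E.g. (subreg:DI (reg:SI 100) 0) is paradoxical: the outer mode is
   wider than the inner one, so the extra high-order bits have no
   defined value.  */
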
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

1444
535a42b1
NS
1445/* Similar to `operand_subword', but never return 0. If we can't
1446 extract the required subword, put OP into a register and try again.
1447 The second attempt must succeed. We always validate the address in
1448 this case.
23b2ce53
RS
1449
1450 MODE is the mode of OP, in case it is CONST_INT. */
1451
1452rtx
502b8322 1453operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1454{
ddef6bc7 1455 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1456
1457 if (result)
1458 return result;
1459
1460 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1461 {
1462 /* If this is a register which can not be accessed by words, copy it
1463 to a pseudo register. */
f8cfc6aa 1464 if (REG_P (op))
77e6b0eb
JC
1465 op = copy_to_reg (op);
1466 else
1467 op = force_reg (mode, op);
1468 }
23b2ce53 1469
ddef6bc7 1470 result = operand_subword (op, offset, 1, mode);
5b0264cb 1471 gcc_assert (result);
23b2ce53
RS
1472
1473 return result;
1474}
1475\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

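/* For example, a MEM known to reference byte 6 of a 32-bit-aligned
   decl has MEM_OFFSET 6, so get_mem_align_offset (mem, 32) returns
   6 & 3, i.e. 2: the address is two bytes past a 32-bit-aligned
   boundary.  */
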
/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
f18a7b25
MJ
1699 if (TREE_CODE (base) == MEM_REF
1700 || TREE_CODE (base) == TARGET_MEM_REF)
1701 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1702 0))));
1703 else
1704 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1705 }
ba30e50d 1706
2039d7aa
RH
1707 /* If this expression uses it's parent's alias set, mark it such
1708 that we won't change it. */
b4ada065 1709 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
1710 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1711
8ac61af7
RK
1712 /* If this is a decl, set the attributes of the MEM from it. */
1713 if (DECL_P (t))
1714 {
f12144dd 1715 attrs.expr = t;
754c3d5d
RS
1716 attrs.offset_known_p = true;
1717 attrs.offset = 0;
6f1087be 1718 apply_bitpos = bitpos;
a787ccc3 1719 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
1720 }
1721
30b0317c 1722 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1723 else if (CONSTANT_CLASS_P (t))
30b0317c 1724 ;
998d7deb 1725
a787ccc3
RS
1726 /* If this is a field reference, record it. */
1727 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1728 {
f12144dd 1729 attrs.expr = t;
754c3d5d
RS
1730 attrs.offset_known_p = true;
1731 attrs.offset = 0;
6f1087be 1732 apply_bitpos = bitpos;
a787ccc3
RS
1733 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1734 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1735 }
1736
1737 /* If this is an array reference, look for an outer field reference. */
1738 else if (TREE_CODE (t) == ARRAY_REF)
1739 {
1740 tree off_tree = size_zero_node;
1b1838b6
JW
1741 /* We can't modify t, because we use it at the end of the
1742 function. */
1743 tree t2 = t;
998d7deb
RH
1744
1745 do
1746 {
1b1838b6 1747 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1748 tree low_bound = array_ref_low_bound (t2);
1749 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1750
1751 /* We assume all arrays have sizes that are a multiple of a byte.
1752 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1753 index, then convert to sizetype and multiply by the size of
1754 the array element. */
1755 if (! integer_zerop (low_bound))
4845b383
KH
1756 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1757 index, low_bound);
2567406a 1758
44de5aeb 1759 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1760 size_binop (MULT_EXPR,
1761 fold_convert (sizetype,
1762 index),
44de5aeb
RK
1763 unit_size),
1764 off_tree);
1b1838b6 1765 t2 = TREE_OPERAND (t2, 0);
998d7deb 1766 }
1b1838b6 1767 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1768
30b0317c
RB
1769 if (DECL_P (t2)
1770 || TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1771 {
f12144dd 1772 attrs.expr = t2;
754c3d5d 1773 attrs.offset_known_p = false;
cc269bb6 1774 if (tree_fits_uhwi_p (off_tree))
6f1087be 1775 {
754c3d5d 1776 attrs.offset_known_p = true;
ae7e9ddd 1777 attrs.offset = tree_to_uhwi (off_tree);
6f1087be
RH
1778 apply_bitpos = bitpos;
1779 }
998d7deb 1780 }
30b0317c 1781 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
1782 }
1783
56c47f22 1784 /* If this is an indirect reference, record it. */
70f34814 1785 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1786 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1787 {
f12144dd 1788 attrs.expr = t;
754c3d5d
RS
1789 attrs.offset_known_p = true;
1790 attrs.offset = 0;
56c47f22
RG
1791 apply_bitpos = bitpos;
1792 }
1793
30b0317c
RB
1794 /* Compute the alignment. */
1795 unsigned int obj_align;
1796 unsigned HOST_WIDE_INT obj_bitpos;
1797 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1798 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1799 if (obj_bitpos != 0)
1800 obj_align = (obj_bitpos & -obj_bitpos);
1801 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
1802 }
1803
cc269bb6 1804 if (tree_fits_uhwi_p (new_size))
a787ccc3
RS
1805 {
1806 attrs.size_known_p = true;
ae7e9ddd 1807 attrs.size = tree_to_uhwi (new_size);
a787ccc3
RS
1808 }
1809
15c812e3 1810 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1811 bit position offset. Similarly, increase the size of the accessed
1812 object to contain the negative offset. */
6f1087be 1813 if (apply_bitpos)
8c317c5f 1814 {
754c3d5d
RS
1815 gcc_assert (attrs.offset_known_p);
1816 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1817 if (attrs.size_known_p)
1818 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1819 }
6f1087be 1820
8ac61af7 1821 /* Now set the attributes we computed above. */
f18a7b25 1822 attrs.addrspace = as;
f12144dd 1823 set_mem_attrs (ref, &attrs);
173b24b9
RK
1824}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
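
/* A typical call sequence (an illustrative sketch, not from this file):
   expanders that build a MEM for a tree expression hand the tree to
   set_mem_attributes so alias, alignment, and size information is not
   lost.  EXP and ADDR below are hypothetical:

     rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
     set_mem_attributes (mem, exp, 0);

   OBJECTP is 0 here because EXP is an expression referencing an
   existing object rather than a fresh object of the type.  */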

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
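
/* A minimal sketch of the setters above (illustrative; the MEM and the
   values are hypothetical).  Alignment is given in bits, size and
   offset in bytes:

     set_mem_align (mem, 32);
     set_mem_size (mem, 4);
     set_mem_offset (mem, 8);
     clear_mem_offset (mem);

   Each setter copies the current mem_attrs, changes one field, and
   re-installs the result through set_mem_attrs, so the calls may be
   made in any order.  */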
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate the address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust_address, int adjust_object,
		  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && trunc_int_for_mode (offset, pointer_mode) == offset)
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
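
/* An illustrative sketch (not from this file): most callers reach
   adjust_address_1 through the adjust_address and adjust_address_nv
   macros, which fix the VALIDATE and ADJUST_ADDRESS arguments.  For
   example, splitting a hypothetical DImode MEM into word-sized halves:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   Both halves keep MEM_EXPR and MEM_OFFSET consistent with the
   original reference, and the alignment of HI is reduced to reflect
   the byte offset.  */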

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is nonzero, the memory
   address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && attrs.offset >= 0)
	    break;

	  if (! tree_fits_uhwi_p (offset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += tree_to_uhwi (offset);
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
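
/* An illustrative sketch (not from this file): a target that lacks
   byte loads could widen a hypothetical word-aligned QImode reference
   and mask out the byte afterwards:

     rtx word = widen_memory_access (byte_mem, SImode, 0);

   The result deliberately carries alias set 0 and the full SImode
   size, since the wider access may touch neighboring objects.  */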
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
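
/* A usage sketch (illustrative, not from this file): a fresh label is
   inert until it is emitted into the insn stream, e.g.

     rtx label = gen_label_rtx ();
     emit_jump (label);
     ... insns to be skipped ...
     emit_label (label);

   where emit_jump and emit_label are the generic emit helpers defined
   elsewhere in the middle end.  */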
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

/* Check that ORIG is not marked when it should not be, and mark ORIG
   as in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      reset_insn_used_flags (XVECEXP (pat, 0, i));
	  }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    verify_insn_sharing (XVECEXP (pat, 0, i));
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
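
/* Illustrative note (not from this file): the verifier uses the same
   USED-bit protocol as the unsharing code -- clear every mark, walk
   each insn marking subexpressions and reporting any rtx reached
   twice, then clear the marks again so later consumers start from a
   clean state.  A pass that rewrites patterns can be checked in a
   debug session by simply calling

     verify_rtl_sharing ();

   which pinpoints the insn that introduced the invalid sharing.  */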

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
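
/* The mark-and-copy protocol above in miniature (an illustrative
   sketch): the USED bits must be clear before the walk, so callers
   typically pair the calls much as unshare_all_rtl_again does,

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   The first traversal clears the marks; the second copies any
   subexpression it reaches a second time.  */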

/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
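
/* An illustrative sketch (hypothetical operands, not from this file):
   when an expansion of A = B op C clobbers A before B and C are fully
   consumed, the inputs can be shielded first,

     b = make_safe_from (b, a);
     c = make_safe_from (c, a);

   so that any overlap with A is moved into a fresh pseudo register.  */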
\f
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0, 0);
	}
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}
3055
b5b8b0ac
AO
3056/* Return the number of actual (non-debug) insns emitted in this
3057 function. */
3058
3059int
3060get_max_insn_count (void)
3061{
3062 int n = cur_insn_uid;
3063
3064 /* The table size must be stable across -g, to avoid codegen
3065 differences due to debug insns, and not be affected by
3066 -fmin-insn-uid, to avoid excessive table size and to simplify
3067 debugging of -fcompare-debug failures. */
3068 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3069 n -= cur_debug_insn_uid;
3070 else
3071 n -= MIN_NONDEBUG_INSN_UID;
3072
3073 return n;
3074}
3075
23b2ce53
RS
3076\f
3077/* Return the next insn. If it is a SEQUENCE, return the first insn
3078 of the sequence. */
3079
3080rtx
502b8322 3081next_insn (rtx insn)
23b2ce53 3082{
75547801
KG
3083 if (insn)
3084 {
3085 insn = NEXT_INSN (insn);
3086 if (insn && NONJUMP_INSN_P (insn)
3087 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3088 insn = XVECEXP (PATTERN (insn), 0, 0);
3089 }
23b2ce53 3090
75547801 3091 return insn;
23b2ce53
RS
3092}
3093
3094/* Return the previous insn. If it is a SEQUENCE, return the last insn
3095 of the sequence. */
3096
3097rtx
502b8322 3098previous_insn (rtx insn)
23b2ce53 3099{
75547801
KG
3100 if (insn)
3101 {
3102 insn = PREV_INSN (insn);
3103 if (insn && NONJUMP_INSN_P (insn)
3104 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3105 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3106 }
23b2ce53 3107
75547801 3108 return insn;
23b2ce53
RS
3109}
3110
3111/* Return the next insn after INSN that is not a NOTE. This routine does not
3112 look inside SEQUENCEs. */
3113
3114rtx
502b8322 3115next_nonnote_insn (rtx insn)
23b2ce53 3116{
75547801
KG
3117 while (insn)
3118 {
3119 insn = NEXT_INSN (insn);
3120 if (insn == 0 || !NOTE_P (insn))
3121 break;
3122 }
23b2ce53 3123
75547801 3124 return insn;
23b2ce53
RS
3125}
3126
1e211590
DD
3127/* Return the next insn after INSN that is not a NOTE, but stop the
3128 search before we enter another basic block. This routine does not
3129 look inside SEQUENCEs. */
3130
3131rtx
3132next_nonnote_insn_bb (rtx insn)
3133{
3134 while (insn)
3135 {
3136 insn = NEXT_INSN (insn);
3137 if (insn == 0 || !NOTE_P (insn))
3138 break;
3139 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3140 return NULL_RTX;
3141 }
3142
3143 return insn;
3144}
3145
23b2ce53
RS
3146/* Return the previous insn before INSN that is not a NOTE. This routine does
3147 not look inside SEQUENCEs. */
3148
3149rtx
502b8322 3150prev_nonnote_insn (rtx insn)
23b2ce53 3151{
75547801
KG
3152 while (insn)
3153 {
3154 insn = PREV_INSN (insn);
3155 if (insn == 0 || !NOTE_P (insn))
3156 break;
3157 }
23b2ce53 3158
75547801 3159 return insn;
23b2ce53
RS
3160}
3161
896aa4ea
DD
3162/* Return the previous insn before INSN that is not a NOTE, but stop
3163 the search before we enter another basic block. This routine does
3164 not look inside SEQUENCEs. */
3165
3166rtx
3167prev_nonnote_insn_bb (rtx insn)
3168{
3169 while (insn)
3170 {
3171 insn = PREV_INSN (insn);
3172 if (insn == 0 || !NOTE_P (insn))
3173 break;
3174 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3175 return NULL_RTX;
3176 }
3177
3178 return insn;
3179}
3180
b5b8b0ac
AO
3181/* Return the next insn after INSN that is not a DEBUG_INSN. This
3182 routine does not look inside SEQUENCEs. */
3183
3184rtx
3185next_nondebug_insn (rtx insn)
3186{
3187 while (insn)
3188 {
3189 insn = NEXT_INSN (insn);
3190 if (insn == 0 || !DEBUG_INSN_P (insn))
3191 break;
3192 }
3193
3194 return insn;
3195}
3196
3197/* Return the previous insn before INSN that is not a DEBUG_INSN.
3198 This routine does not look inside SEQUENCEs. */
3199
3200rtx
3201prev_nondebug_insn (rtx insn)
3202{
3203 while (insn)
3204 {
3205 insn = PREV_INSN (insn);
3206 if (insn == 0 || !DEBUG_INSN_P (insn))
3207 break;
3208 }
3209
3210 return insn;
3211}
3212
f0fc0803
JJ
3213/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3214 This routine does not look inside SEQUENCEs. */
3215
3216rtx
3217next_nonnote_nondebug_insn (rtx insn)
3218{
3219 while (insn)
3220 {
3221 insn = NEXT_INSN (insn);
3222 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3223 break;
3224 }
3225
3226 return insn;
3227}
3228
3229/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3230 This routine does not look inside SEQUENCEs. */
3231
3232rtx
3233prev_nonnote_nondebug_insn (rtx insn)
3234{
3235 while (insn)
3236 {
3237 insn = PREV_INSN (insn);
3238 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3239 break;
3240 }
3241
3242 return insn;
3243}
3244
23b2ce53
RS
3245/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3246 or 0, if there is none. This routine does not look inside
0f41302f 3247 SEQUENCEs. */
23b2ce53
RS
3248
3249rtx
502b8322 3250next_real_insn (rtx insn)
23b2ce53 3251{
75547801
KG
3252 while (insn)
3253 {
3254 insn = NEXT_INSN (insn);
3255 if (insn == 0 || INSN_P (insn))
3256 break;
3257 }
23b2ce53 3258
75547801 3259 return insn;
23b2ce53
RS
3260}
3261
3262/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3263 or 0, if there is none. This routine does not look inside
3264 SEQUENCEs. */
3265
3266rtx
502b8322 3267prev_real_insn (rtx insn)
23b2ce53 3268{
75547801
KG
3269 while (insn)
3270 {
3271 insn = PREV_INSN (insn);
3272 if (insn == 0 || INSN_P (insn))
3273 break;
3274 }
23b2ce53 3275
75547801 3276 return insn;
23b2ce53
RS
3277}
3278
ee960939
OH
3279/* Return the last CALL_INSN in the current list, or 0 if there is none.
3280 This routine does not look inside SEQUENCEs. */
3281
3282rtx
502b8322 3283last_call_insn (void)
ee960939
OH
3284{
3285 rtx insn;
3286
3287 for (insn = get_last_insn ();
4b4bf941 3288 insn && !CALL_P (insn);
ee960939
OH
3289 insn = PREV_INSN (insn))
3290 ;
3291
3292 return insn;
3293}
3294
23b2ce53 3295/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3296 does not look inside SEQUENCEs. After reload this also skips over
3297 standalone USE and CLOBBER insn. */
23b2ce53 3298
69732dcb 3299int
4f588890 3300active_insn_p (const_rtx insn)
69732dcb 3301{
4b4bf941 3302 return (CALL_P (insn) || JUMP_P (insn)
39718607 3303 || JUMP_TABLE_DATA_P (insn) /* FIXME */
4b4bf941 3304 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3305 && (! reload_completed
3306 || (GET_CODE (PATTERN (insn)) != USE
3307 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3308}
3309
23b2ce53 3310rtx
502b8322 3311next_active_insn (rtx insn)
23b2ce53 3312{
75547801
KG
3313 while (insn)
3314 {
3315 insn = NEXT_INSN (insn);
3316 if (insn == 0 || active_insn_p (insn))
3317 break;
3318 }
23b2ce53 3319
75547801 3320 return insn;
23b2ce53
RS
3321}
3322
3323/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3324 does not look inside SEQUENCEs. After reload this also skips over
3325 standalone USE and CLOBBER insn. */
23b2ce53
RS
3326
3327rtx
502b8322 3328prev_active_insn (rtx insn)
23b2ce53 3329{
75547801
KG
3330 while (insn)
3331 {
3332 insn = PREV_INSN (insn);
3333 if (insn == 0 || active_insn_p (insn))
3334 break;
3335 }
23b2ce53 3336
75547801 3337 return insn;
23b2ce53 3338}
23b2ce53
RS
\f
#ifdef HAVE_cc0
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return -1;
}
#endif

/* Increment the label uses for all labels present in rtx X.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx next, *p;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
        case REG_CROSSING_JUMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
         ? (after ? PREV_INSN (after) : get_last_insn ())
         : NEXT_INSN (before);
}
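
/* A usage sketch, not part of the original source: a pass that wants to
   split everything, in the spirit of split_all_insns, might drive
   try_split roughly like this.  NEXT is captured first because try_split
   may delete INSN and emit a replacement sequence in its place:

      rtx next = NEXT_INSN (insn);
      if (INSN_P (insn) && !INSN_DELETED_P (insn))
        try_split (PATTERN (insn), insn, 1);
      insn = next;
*/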
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slot.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Link INSN into the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and
   NOTE_P objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and
   NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx insn, rtx prev, rtx next)
{
  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx sequence = PATTERN (insn);
      PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  rtx prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx insn, rtx after)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      if (get_last_insn () == after)
        set_last_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (after == stack->last)
              {
                stack->last = insn;
                break;
              }
        }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx insn, rtx before)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      if (get_insns () == before)
        set_first_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (before == stack->first)
              {
                stack->first = insn;
                break;
              }

          gcc_assert (stack);
        }
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (insn);
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
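
/* A usage sketch, not part of the original source: per the comment above,
   a caller that wants to move INSN elsewhere without touching its DF
   information unlinks it, clears the chain pointers itself, and then
   re-inserts it.  OTHER_INSN is a hypothetical new neighbor:

      remove_insn (insn);
      PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
      add_insn_after (insn, other_insn, NULL);
*/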

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
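
/* A usage sketch, not part of the original source: callers such as the
   call expanders typically accumulate the usage list first and attach it
   in one step.  REG here is a hypothetical hard register that the call
   implicitly uses:

      rtx call_fusage = NULL_RTX;
      use_reg (&call_fusage, reg);
      add_function_usage_to (call_insn, call_fusage);
*/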

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
#ifdef ENABLE_CHECKING
  rtx x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx (*make_raw) (rtx))
{
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return label;
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx
emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
                          rtx (*make_raw)(rtx))
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return label;
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
    /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
      return true;

    /* Notes for var tracking and EH region markers can appear between or
       inside basic blocks.  If the caller is emitting on the basic block
       boundary, do not set BLOCK_FOR_INSN on the new note.  */
    case NOTE_INSN_VAR_LOCATION:
    case NOTE_INSN_CALL_ARG_LOCATION:
    case NOTE_INSN_EH_REGION_BEG:
    case NOTE_INSN_EH_REGION_END:
      return on_bb_boundary_p;

    /* Otherwise, BLOCK_FOR_INSN must be set.  */
    default:
      return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx
emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
                           rtx (*make_raw) (rtx))
{
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx
emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
                    rtx (*make_raw) (rtx))
{
  rtx prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}

/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
                            rtx (*make_raw) (rtx))
{
  rtx first = PREV_INSN (before);
  rtx last = emit_pattern_before_noloc (pattern, before,
                                        insnp ? before : NULL_RTX,
                                        NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
                     bool insnp, rtx (*make_raw) (rtx))
{
  rtx next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_table_data (rtx table)
{
  rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make a note of kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note = make_note_raw (kind);
  add_insn (note);
  return note;
}

4933
c41c1387
RS
4934/* Emit a clobber of lvalue X. */
4935
4936rtx
4937emit_clobber (rtx x)
4938{
4939 /* CONCATs should not appear in the insn stream. */
4940 if (GET_CODE (x) == CONCAT)
4941 {
4942 emit_clobber (XEXP (x, 0));
4943 return emit_clobber (XEXP (x, 1));
4944 }
4945 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4946}
4947
4948/* Return a sequence of insns to clobber lvalue X. */
4949
4950rtx
4951gen_clobber (rtx x)
4952{
4953 rtx seq;
4954
4955 start_sequence ();
4956 emit_clobber (x);
4957 seq = get_insns ();
4958 end_sequence ();
4959 return seq;
4960}
4961
4962/* Emit a use of rvalue X. */
4963
4964rtx
4965emit_use (rtx x)
4966{
4967 /* CONCATs should not appear in the insn stream. */
4968 if (GET_CODE (x) == CONCAT)
4969 {
4970 emit_use (XEXP (x, 0));
4971 return emit_use (XEXP (x, 1));
4972 }
4973 return emit_insn (gen_rtx_USE (VOIDmode, x));
4974}
4975
4976/* Return a sequence of insns to use rvalue X. */
4977
4978rtx
4979gen_use (rtx x)
4980{
4981 rtx seq;
4982
4983 start_sequence ();
4984 emit_use (x);
4985 seq = get_insns ();
4986 end_sequence ();
4987 return seq;
4988}
4989
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one *useful* set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = single_set (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
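
/* A usage sketch, not part of the original source: a typical use of
   set_unique_reg_note is to record the known constant result of a
   multi-insn constant load, so later passes can treat INSN's destination
   as equal to VALUE (a hypothetical HOST_WIDE_INT):

      set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
*/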
\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
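
/* A usage sketch, not part of the original source: emit dispatches on the
   shape of the pattern, so, assuming LABEL is a CODE_LABEL,

      emit (gen_rtx_SET (VOIDmode, pc_rtx,
                         gen_rtx_LABEL_REF (VOIDmode, label)));

   is classified as a JUMP_INSN (its SET_DEST is pc_rtx), and, being an
   unconditional jump, is followed by a barrier as well.  */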
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
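
/* A usage sketch, not part of the original source: this pair lets code
   that is running inside a nested sequence emit an insn at the start of
   the function proper, where PATTERN is a hypothetical insn pattern:

      push_topmost_sequence ();
      emit_insn_after (pattern, get_insns ());
      pop_topmost_sequence ();
*/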
5223
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

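/* Illustrative sketch, not part of the original file: the usual pattern
   for building a detached insn sequence.  DEST and SRC are hypothetical
   operands; any emit_* call behaves the same inside a sequence.  */

static rtx
build_move_sequence (rtx dest, rtx src)
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, src);  /* Emitted into the new sequence.  */
  seq = get_insns ();          /* Must be fetched before end_sequence.  */
  end_sequence ();             /* Restores the previously current chain.  */
  return seq;
}
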
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
         pseudo reg clobbers or clobbers of hard registers that originated
         as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
          && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

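/* Illustrative sketch, not part of the original file: the two-step use
   described above copy_insn_1 -- copy a pattern with copy_insn, then run
   copy_insn_1 over a note's expression so any SCRATCHes stay shared with
   the copied pattern.  The helper name is hypothetical.  */

static rtx
copy_pattern_and_equal_note (rtx insn, rtx *note_expr_copy)
{
  rtx pat = copy_insn (PATTERN (insn));
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  if (note)
    *note_expr_copy = copy_insn_1 (XEXP (note, 0));
  return pat;
}
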
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx
copy_delay_slot_insn (rtx insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = copy_rtx (insn);
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, the one vector when all elements are one,
   and the minus-one vector when all elements are minus one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

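/* Illustrative sketch, not part of the original file: an all-zero rtvec
   passed to gen_rtx_CONST_VECTOR collapses to the shared CONST0_RTX of
   the vector mode instead of allocating a fresh CONST_VECTOR.  */

static rtx
build_zero_vector (enum machine_mode mode)
{
  int n = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (n);
  int i;

  for (i = 0; i < n; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
  return gen_rtx_CONST_VECTOR (mode, v);  /* == CONST0_RTX (mode).  */
}
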
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (enum machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}

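/* Illustrative sketch, not part of the original file: because the
   CONST_INTs in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are created
   exactly once above, later GEN_INT calls for small values return the
   shared rtx, so pointer comparison is valid.  */

static bool
small_const_ints_are_shared (void)
{
  return GEN_INT (0) == const0_rtx && GEN_INT (1) == const1_rtx;
}
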
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER of hard register REGNO in mode MODE,
   creating and caching the rtx on first use.  */

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

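/* Illustrative sketch, not part of the original file: repeated requests
   for the same (mode, regno) pair return the cached rtx, so the clobber
   may be compared and shared by pointer.  */

static bool
hard_reg_clobber_is_shared (enum machine_mode mode, unsigned int regno)
{
  return gen_hard_reg_clobber (mode, regno)
         == gen_hard_reg_clobber (mode, regno);
}
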
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   data structures are built lazily only when some instructions in a given
   place are needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
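
/* Illustrative sketch, not part of the original file: how a target
   expander might consult need_atomic_barrier_p around an atomic store.
   emit_target_fence is a hypothetical stand-in for the target's own
   fence-emitting routine, and the plain move stands in for the target's
   actual atomic store pattern.  */

extern void emit_target_fence (void);  /* Hypothetical.  */

static void
expand_store_with_barriers (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))   /* Release-style, before.  */
    emit_target_fence ();
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, false))  /* Acquire-style, after.  */
    emit_target_fence ();
}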

#include "gt-emit-rtl.h"