/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

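/* For illustration: because CONST_INTs are shared, pointer equality
   doubles as value equality.  For example,

     rtx a = gen_rtx_CONST_INT (VOIDmode, 42);
     rtx b = GEN_INT (42);
     gcc_assert (a == b);

   Small values come straight from the const_int_rtx array; larger ones
   are uniquified through const_int_htab.  */
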
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

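/* For example, since trunc_int_for_mode sign-extends from the width of
   MODE, gen_int_mode (0xff, QImode) yields (const_int -1), the
   canonical QImode form, whereas a bare GEN_INT (0xff) would produce
   (const_int 255), which is not a valid QImode constant.  */
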
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
	of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

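/* Illustration of the three cases above, assuming a 64-bit
   HOST_WIDE_INT (the host width is an assumption of this example):

     immed_double_const (5, 0, SImode)  => (const_int 5)   [case 1]
     immed_double_const (5, 0, TImode)  => (const_int 5)   [case 2]
     immed_double_const (0, 1, TImode)  => CONST_DOUBLE    [case 3]

   The CONST_DOUBLE in case 3 carries VOIDmode, as all integer
   CONST_DOUBLEs do.  */
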
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

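/* Usage sketch (the symbol name is hypothetical): a backend wanting a
   read-only, non-trapping reference to a constant-pool entry could
   write

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (Pmode, addr);

   whereas a register save slot in the fixed frame would use
   gen_frame_mem, which additionally puts the MEM in the frame alias
   set.  */
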
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

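/* Concrete consequences of the rules above, assuming a 32-bit
   little-endian target (both properties are assumptions of these
   examples):

     (subreg:SI (reg:DI) 0)  valid: the low word
     (subreg:SI (reg:DI) 4)  valid: the high word
     (subreg:SI (reg:DI) 2)  invalid: not aligned to the outer size
     (subreg:SI (reg:DF) 0)  allowed only via the word_mode escape
			     hatch noted above
     (subreg:DI (reg:SI) 0)  paradoxical: any nonzero offset would be
			     rejected  */
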
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

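/* Usage sketch: building a two-element PARALLEL from individually
   constructed SETs (set0 and set1 are hypothetical names):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   gen_rtvec_v does the same from an array of rtxen instead of
   varargs.  */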
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}

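/* Worked example: on a 32-bit big-endian target (an assumption of this
   example), byte_lowpart_offset (HImode, SImode) is 2, because the low
   16 bits live at the higher address; on little-endian it is 0.  In
   the paradoxical direction, byte_lowpart_offset (SImode, HImode)
   returns -2 and 0 respectively.  */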
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

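/* Illustration of the CONCAT case above: with generating_concat_p set,
   gen_reg_rtx (SCmode) yields

     (concat:SC (reg:SF P) (reg:SF P'))

   two independent SFmode pseudos rather than one SCmode pseudo, so the
   real and imaginary parts can be allocated separately.  */
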
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

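/* Worked example, assuming 64-bit words (UNITS_PER_WORD == 8, an
   assumption of this example): for DImode within TImode,

     subreg_lowpart_offset  (DImode, TImode) == 0  (little-endian)
     subreg_highpart_offset (DImode, TImode) == 8  (little-endian)

   and the two results swap on a big-endian target.  */
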
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

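/* Illustrative example: on a 32-bit target (an assumption of this
   example), operand_subword of a DImode register with OFFSET 1 reduces
   via simplify_gen_subreg to (subreg:SI (reg:DI) 4), the word at byte
   offset 4; word 0 is the low-order word unless WORDS_BIG_ENDIAN.  */
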
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    get_object_alignment (MEM_EXPR (mem), align))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

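/* Illustrative example: for a MEM whose MEM_EXPR is a DECL aligned to
   at least 32 bits and whose known MEM_OFFSET is 6,
   get_mem_align_offset (mem, 32) returns 6 & 3 == 2, i.e. the access
   sits two bytes past a 32-bit boundary; -1 means nothing is known.  */
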
6926c713 1538/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1539 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1540 if we are making a new object of this type. BITPOS is nonzero if
1541 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1542
1543void
502b8322
AJ
1544set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1545 HOST_WIDE_INT bitpos)
173b24b9 1546{
6f1087be 1547 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1548 tree type;
f12144dd 1549 struct mem_attrs attrs, *defattrs, *refattrs;
173b24b9
RK
1550
1551 /* It can happen that type_for_mode was given a mode for which there
1552 is no language-level type. In which case it returns NULL, which
1553 we can see here. */
1554 if (t == NULL_TREE)
1555 return;
1556
1557 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1558 if (type == error_mark_node)
1559 return;
173b24b9 1560
173b24b9
RK
1561 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1562 wrong answer, as it assumes that DECL_RTL already has the right alias
1563 info. Callers should not set DECL_RTL until after the call to
1564 set_mem_attributes. */
5b0264cb 1565 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1566
f12144dd
RS
1567 memset (&attrs, 0, sizeof (attrs));
1568
738cc472 1569 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1570 front-end routine) and use it. */
f12144dd 1571 attrs.alias = get_alias_set (t);
173b24b9 1572
a5e9c810 1573 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
07cb6e8c
JM
1574 MEM_IN_STRUCT_P (ref)
1575 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
f8ad8d7c 1576 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1577
8ac61af7
RK
1578 /* If we are making an object of this type, or if this is a DECL, we know
1579 that it is a scalar if the type is not an aggregate. */
07cb6e8c
JM
1580 if ((objectp || DECL_P (t))
1581 && ! AGGREGATE_TYPE_P (type)
1582 && TREE_CODE (type) != COMPLEX_TYPE)
173b24b9
RK
1583 MEM_SCALAR_P (ref) = 1;
1584
268f7033 1585 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1586 refattrs = MEM_ATTRS (ref);
1587 if (refattrs)
268f7033
UW
1588 {
1589 /* ??? Can this ever happen? Calling this routine on a MEM that
1590 already carries memory attributes should probably be invalid. */
f12144dd 1591 attrs.expr = refattrs->expr;
754c3d5d 1592 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1593 attrs.offset = refattrs->offset;
754c3d5d 1594 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1595 attrs.size = refattrs->size;
1596 attrs.align = refattrs->align;
268f7033
UW
1597 }
1598
1599 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1600 else
268f7033 1601 {
f12144dd
RS
1602 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1603 gcc_assert (!defattrs->expr);
754c3d5d 1604 gcc_assert (!defattrs->offset_known_p);
f12144dd 1605
268f7033 1606 /* Respect mode size. */
754c3d5d 1607 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1608 attrs.size = defattrs->size;
268f7033
UW
1609 /* ??? Is this really necessary? We probably should always get
1610 the size from the type below. */
1611
1612 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1613 if T is an object, always compute the object alignment below. */
1614 if (TYPE_P (t))
1615 attrs.align = defattrs->align;
1616 else
1617 attrs.align = BITS_PER_UNIT;
1618 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1619 e.g. if the type carries an alignment attribute. Should we be
1620 able to simply always use TYPE_ALIGN? */
1621 }
1622
1623 /* We can set the alignment from the type if we are making an object,
1624 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
a80903ff 1625 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1626 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1627
1628 else if (TREE_CODE (t) == MEM_REF)
1629 {
a80903ff 1630 tree op0 = TREE_OPERAND (t, 0);
1631 if (TREE_CODE (op0) == ADDR_EXPR
1632 && (DECL_P (TREE_OPERAND (op0, 0))
1633 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
70f34814 1634 {
3e32c761 1635 if (DECL_P (TREE_OPERAND (op0, 0)))
f12144dd 1636 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1637 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1638 {
f12144dd 1639 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
70f34814 1640#ifdef CONSTANT_ALIGNMENT
1641 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1642 attrs.align);
70f34814 1643#endif
1644 }
1645 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1646 {
1647 unsigned HOST_WIDE_INT ioff
1648 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1649 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd 1650 attrs.align = MIN (aoff, attrs.align);
3e32c761 1651 }
1652 }
1653 else
1654 /* ??? This isn't fully correct; we can't set the alignment from the
1655 type in all cases. */
f12144dd 1656 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
70f34814 1657 }
a80903ff 1658
1659 else if (TREE_CODE (t) == TARGET_MEM_REF)
1660 /* ??? This isn't fully correct; we can't set the alignment from the
1661 type in all cases. */
f12144dd 1662 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
9407f6bc 1663
1664 /* If the size is known, we can set that. */
1665 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1666 {
1667 attrs.size_known_p = true;
1668 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1669 }
738cc472 1670
1671 /* If T is not a type, we may be able to deduce some more information about
1672 the expression. */
1673 if (! TYPE_P (t))
8ac61af7 1674 {
8476af98 1675 tree base;
df96b059 1676 bool align_computed = false;
389fdba0 1677
1678 if (TREE_THIS_VOLATILE (t))
1679 MEM_VOLATILE_P (ref) = 1;
173b24b9 1680
1681 /* Now remove any conversions: they don't change what the underlying
1682 object is. Likewise for SAVE_EXPR. */
1043771b 1683 while (CONVERT_EXPR_P (t)
1684 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1685 || TREE_CODE (t) == SAVE_EXPR)
1686 t = TREE_OPERAND (t, 0);
1687
1688 /* Note whether this expression can trap. */
1689 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1690
1691 base = get_base_address (t);
1692 if (base && DECL_P (base)
1693 && TREE_READONLY (base)
1694 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1695 && !TREE_THIS_VOLATILE (base))
21d9971a 1696 MEM_READONLY_P (ref) = 1;
8476af98 1697
1698 /* If this expression uses its parent's alias set, mark it such
1699 that we won't change it. */
1700 if (component_uses_parent_alias_set (t))
1701 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1702
1703 /* If this is a decl, set the attributes of the MEM from it. */
1704 if (DECL_P (t))
1705 {
f12144dd 1706 attrs.expr = t;
1707 attrs.offset_known_p = true;
1708 attrs.offset = 0;
6f1087be 1709 apply_bitpos = bitpos;
1710 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1711 {
1712 attrs.size_known_p = true;
1713 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1714 }
1715 else
1716 attrs.size_known_p = false;
f12144dd 1717 attrs.align = DECL_ALIGN (t);
df96b059 1718 align_computed = true;
1719 }
1720
40c0668b 1721 /* If this is a constant, we know the alignment. */
6615c446 1722 else if (CONSTANT_CLASS_P (t))
9ddfb1a7 1723 {
f12144dd 1724 attrs.align = TYPE_ALIGN (type);
9ddfb1a7 1725#ifdef CONSTANT_ALIGNMENT
f12144dd 1726 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
9ddfb1a7 1727#endif
df96b059 1728 align_computed = true;
9ddfb1a7 1729 }
1730
1731 /* If this is a field reference and not a bit-field, record it. */
fa10beec 1732 /* ??? There is some information that can be gleaned from bit-fields,
1733 such as the word offset in the structure that might be modified.
1734 But skip it for now. */
1735 else if (TREE_CODE (t) == COMPONENT_REF
1736 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1737 {
f12144dd 1738 attrs.expr = t;
1739 attrs.offset_known_p = true;
1740 attrs.offset = 0;
6f1087be 1741 apply_bitpos = bitpos;
1742 /* ??? Any reason the field size would be different than
1743 the size we got from the type? */
1744 }
1745
1746 /* If this is an array reference, look for an outer field reference. */
1747 else if (TREE_CODE (t) == ARRAY_REF)
1748 {
1749 tree off_tree = size_zero_node;
1750 /* We can't modify t, because we use it at the end of the
1751 function. */
1752 tree t2 = t;
1753
1754 do
1755 {
1b1838b6 1756 tree index = TREE_OPERAND (t2, 1);
1757 tree low_bound = array_ref_low_bound (t2);
1758 tree unit_size = array_ref_element_size (t2);
1759
1760 /* We assume all arrays have sizes that are a multiple of a byte.
1761 First subtract the lower bound, if any, in the type of the
1762 index, then convert to sizetype and multiply by the size of
1763 the array element. */
1764 if (! integer_zerop (low_bound))
1765 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1766 index, low_bound);
2567406a 1767
44de5aeb 1768 off_tree = size_binop (PLUS_EXPR,
1769 size_binop (MULT_EXPR,
1770 fold_convert (sizetype,
1771 index),
1772 unit_size),
1773 off_tree);
1b1838b6 1774 t2 = TREE_OPERAND (t2, 0);
998d7deb 1775 }
1b1838b6 1776 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1777
1b1838b6 1778 if (DECL_P (t2))
c67a1cf6 1779 {
f12144dd 1780 attrs.expr = t2;
754c3d5d 1781 attrs.offset_known_p = false;
c67a1cf6 1782 if (host_integerp (off_tree, 1))
1783 {
1784 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1785 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1786 attrs.align = DECL_ALIGN (t2);
1787 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1788 attrs.align = aoff;
df96b059 1789 align_computed = true;
1790 attrs.offset_known_p = true;
1791 attrs.offset = ioff;
6f1087be 1792 apply_bitpos = bitpos;
40cb04f1 1793 }
c67a1cf6 1794 }
1b1838b6 1795 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1796 {
f12144dd 1797 attrs.expr = t2;
754c3d5d 1798 attrs.offset_known_p = false;
998d7deb 1799 if (host_integerp (off_tree, 1))
6f1087be 1800 {
1801 attrs.offset_known_p = true;
1802 attrs.offset = tree_low_cst (off_tree, 1);
1803 apply_bitpos = bitpos;
1804 }
1805 /* ??? Any reason the field size would be different than
1806 the size we got from the type? */
1807 }
56c47f22 1808
56c47f22 1809 /* If this is an indirect reference, record it. */
be1ac4ec 1810 else if (TREE_CODE (t) == MEM_REF)
56c47f22 1811 {
f12144dd 1812 attrs.expr = t;
1813 attrs.offset_known_p = true;
1814 attrs.offset = 0;
1815 apply_bitpos = bitpos;
1816 }
1817 }
1818
56c47f22 1819 /* If this is an indirect reference, record it. */
70f34814 1820 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1821 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1822 {
f12144dd 1823 attrs.expr = t;
1824 attrs.offset_known_p = true;
1825 attrs.offset = 0;
1826 apply_bitpos = bitpos;
1827 }
1828
1829 if (!align_computed && !INDIRECT_REF_P (t))
1830 {
e80c2726 1831 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
f12144dd 1832 attrs.align = MAX (attrs.align, obj_align);
df96b059 1833 }
1834 }
1835
15c812e3 1836 /* If we modified OFFSET based on T, then subtract the outstanding
1837 bit position offset. Similarly, increase the size of the accessed
1838 object to contain the negative offset. */
6f1087be 1839 if (apply_bitpos)
8c317c5f 1840 {
1841 gcc_assert (attrs.offset_known_p);
1842 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1843 if (attrs.size_known_p)
1844 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1845 }
6f1087be 1846
8ac61af7 1847 /* Now set the attributes we computed above. */
d05f3564 1848 attrs.addrspace = TYPE_ADDR_SPACE (type);
f12144dd 1849 set_mem_attrs (ref, &attrs);
1850
1851 /* If this is already known to be a scalar or aggregate, we are done. */
1852 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1853 return;
1854
1855 /* If it is a reference into an aggregate, this is part of an aggregate.
1856 Otherwise we don't know. */
1857 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1858 || TREE_CODE (t) == ARRAY_RANGE_REF
1859 || TREE_CODE (t) == BIT_FIELD_REF)
1860 MEM_IN_STRUCT_P (ref) = 1;
1861}
1862
6f1087be 1863void
502b8322 1864set_mem_attributes (rtx ref, tree t, int objectp)
1865{
1866 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1867}
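/* Illustrative sketch (editorial addition, not part of the original
   source): a typical caller creates the MEM for a declaration and then
   attaches the attributes computed above.  DECL is assumed to be a
   VAR_DECL and ADDR a valid address rtx for the target:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   Afterwards MEM_EXPR, MEM_OFFSET, MEM_SIZE, MEM_ALIGN and the alias
   set of MEM all describe the declared object.  */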
1868
1869/* Set the alias set of MEM to SET. */
1870
1871void
4862826d 1872set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1873{
1874 struct mem_attrs attrs;
1875
173b24b9 1876 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1877 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1878 attrs = *get_mem_attrs (mem);
1879 attrs.alias = set;
1880 set_mem_attrs (mem, &attrs);
1881}
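/* Illustrative sketch (editorial addition): alias set 0 conflicts with
   every other alias set, so installing it never trips the assertion
   above and effectively marks MEM as "may alias anything":

     set_mem_alias_set (mem, 0);  */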
1882
1883/* Set the address space of MEM to ADDRSPACE (target-defined). */
1884
1885void
1886set_mem_addr_space (rtx mem, addr_space_t addrspace)
1887{
1888 struct mem_attrs attrs;
1889
1890 attrs = *get_mem_attrs (mem);
1891 attrs.addrspace = addrspace;
1892 set_mem_attrs (mem, &attrs);
173b24b9 1893}
738cc472 1894
d022d93e 1895/* Set the alignment of MEM to ALIGN bits. */
1896
1897void
502b8322 1898set_mem_align (rtx mem, unsigned int align)
738cc472 1899{
1900 struct mem_attrs attrs;
1901
1902 attrs = *get_mem_attrs (mem);
1903 attrs.align = align;
1904 set_mem_attrs (mem, &attrs);
738cc472 1905}
1285011e 1906
998d7deb 1907/* Set the expr for MEM to EXPR. */
1908
1909void
502b8322 1910set_mem_expr (rtx mem, tree expr)
1285011e 1911{
1912 struct mem_attrs attrs;
1913
1914 attrs = *get_mem_attrs (mem);
1915 attrs.expr = expr;
1916 set_mem_attrs (mem, &attrs);
1285011e 1917}
1918
1919/* Set the offset of MEM to OFFSET. */
1920
1921void
527210c4 1922set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 1923{
1924 struct mem_attrs attrs;
1925
1926 attrs = *get_mem_attrs (mem);
1927 attrs.offset_known_p = true;
1928 attrs.offset = offset;
1929 set_mem_attrs (mem, &attrs);
1930}
1931
1932/* Clear the offset of MEM. */
1933
1934void
1935clear_mem_offset (rtx mem)
1936{
1937 struct mem_attrs attrs;
1938
1939 attrs = *get_mem_attrs (mem);
754c3d5d 1940 attrs.offset_known_p = false;
f12144dd 1941 set_mem_attrs (mem, &attrs);
1942}
1943
1944/* Set the size of MEM to SIZE. */
1945
1946void
f5541398 1947set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 1948{
1949 struct mem_attrs attrs;
1950
1951 attrs = *get_mem_attrs (mem);
1952 attrs.size_known_p = true;
1953 attrs.size = size;
1954 set_mem_attrs (mem, &attrs);
1955}
1956
1957/* Clear the size of MEM. */
1958
1959void
1960clear_mem_size (rtx mem)
1961{
1962 struct mem_attrs attrs;
1963
1964 attrs = *get_mem_attrs (mem);
754c3d5d 1965 attrs.size_known_p = false;
f12144dd 1966 set_mem_attrs (mem, &attrs);
998d7deb 1967}
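/* Illustrative sketch (editorial addition): the setters above all share
   one copy-modify-install pattern over struct mem_attrs, so several
   attributes of a valid MEM can be adjusted simply by chaining calls:

     set_mem_align (mem, 32);
     set_mem_size (mem, 8);
     clear_mem_offset (mem);

   After this, MEM is known to be 32-bit aligned and 8 bytes wide, while
   its offset within MEM_EXPR is recorded as unknown.  */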
173b24b9 1968\f
1969/* Return a memory reference like MEMREF, but with its mode changed to MODE
1970 and its address changed to ADDR. (VOIDmode means don't change the mode.
1971 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1972 returned memory location is required to be valid. The memory
1973 attributes are not changed. */
23b2ce53 1974
738cc472 1975static rtx
502b8322 1976change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1977{
09e881c9 1978 addr_space_t as;
60564289 1979 rtx new_rtx;
23b2ce53 1980
5b0264cb 1981 gcc_assert (MEM_P (memref));
09e881c9 1982 as = MEM_ADDR_SPACE (memref);
1983 if (mode == VOIDmode)
1984 mode = GET_MODE (memref);
1985 if (addr == 0)
1986 addr = XEXP (memref, 0);
a74ff877 1987 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 1988 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 1989 return memref;
23b2ce53 1990
f1ec5147 1991 if (validate)
23b2ce53 1992 {
f1ec5147 1993 if (reload_in_progress || reload_completed)
09e881c9 1994 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 1995 else
09e881c9 1996 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 1997 }
750c9258 1998
1999 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2000 return memref;
2001
2002 new_rtx = gen_rtx_MEM (mode, addr);
2003 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2004 return new_rtx;
23b2ce53 2005}
792760b9 2006
2007/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2008 way we are changing MEMREF, so we only preserve the alias set. */
2009
2010rtx
502b8322 2011change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 2012{
f12144dd 2013 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
60564289 2014 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2015 struct mem_attrs attrs, *defattrs;
4e44c1ef 2016
2017 attrs = *get_mem_attrs (memref);
2018 defattrs = mode_mem_attrs[(int) mmode];
2019 attrs.expr = NULL_TREE;
2020 attrs.offset_known_p = false;
2021 attrs.size_known_p = defattrs->size_known_p;
2022 attrs.size = defattrs->size;
2023 attrs.align = defattrs->align;
c2f7bcc3 2024
fdb1c7b3 2025 /* If there are no changes, just return the original memory reference. */
60564289 2026 if (new_rtx == memref)
4e44c1ef 2027 {
f12144dd 2028 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2029 return new_rtx;
4e44c1ef 2030
2031 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2032 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2033 }
fdb1c7b3 2034
f12144dd 2035 set_mem_attrs (new_rtx, &attrs);
60564289 2036 return new_rtx;
f4ef873c 2037}
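/* Illustrative sketch (editorial addition): a common use of
   change_address is to give a BLKmode reference a specific machine
   mode while keeping its address (NULL means keep the address):

     rtx word = change_address (blk_mem, SImode, NULL_RTX);

   As the code above shows, only the alias set survives; the expression,
   offset and size attributes are dropped because the caller has not
   said how the reference changed.  */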
792760b9 2038
2039/* Return a memory reference like MEMREF, but with its mode changed
2040 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2041 nonzero, the memory address is forced to be valid.
2042 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2043 and caller is responsible for adjusting MEMREF base register. */
2044
2045rtx
2046adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2047 int validate, int adjust)
f1ec5147 2048{
823e3574 2049 rtx addr = XEXP (memref, 0);
60564289 2050 rtx new_rtx;
f12144dd 2051 enum machine_mode address_mode;
a6fe9ed4 2052 int pbits;
2053 struct mem_attrs attrs, *defattrs;
2054 unsigned HOST_WIDE_INT max_align;
2055
2056 attrs = *get_mem_attrs (memref);
823e3574 2057
2058 /* If there are no changes, just return the original memory reference. */
2059 if (mode == GET_MODE (memref) && !offset
2060 && (!validate || memory_address_addr_space_p (mode, addr,
2061 attrs.addrspace)))
2062 return memref;
2063
d14419e4 2064 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2065 This may happen even if offset is nonzero -- consider
2066 (plus (plus reg reg) const_int) -- so do this always. */
2067 addr = copy_rtx (addr);
2068
2069 /* Convert a possibly large offset to a signed value within the
2070 range of the target address space. */
f12144dd 2071 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
d4ebfa65 2072 pbits = GET_MODE_BITSIZE (address_mode);
2073 if (HOST_BITS_PER_WIDE_INT > pbits)
2074 {
2075 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2076 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2077 >> shift);
2078 }
2079
2080 if (adjust)
2081 {
2082 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2083 object, we can merge it into the LO_SUM. */
2084 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2085 && offset >= 0
2086 && (unsigned HOST_WIDE_INT) offset
2087 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2088 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2089 plus_constant (XEXP (addr, 1), offset));
2090 else
2091 addr = plus_constant (addr, offset);
2092 }
823e3574 2093
60564289 2094 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472 2095
2096 /* If the address is a REG, change_address_1 rightfully returns memref,
2097 but this would destroy memref's MEM_ATTRS. */
2098 if (new_rtx == memref && offset != 0)
2099 new_rtx = copy_rtx (new_rtx);
2100
2101 /* Compute the new values of the memory attributes due to this adjustment.
2102 We add the offsets and update the alignment. */
2103 if (attrs.offset_known_p)
2104 attrs.offset += offset;
738cc472 2105
2106 /* Compute the new alignment by taking the MIN of the alignment and the
2107 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2108 is zero. */
2109 if (offset != 0)
2110 {
2111 max_align = (offset & -offset) * BITS_PER_UNIT;
2112 attrs.align = MIN (attrs.align, max_align);
2113 }
738cc472 2114
10b76d73 2115 /* We can compute the size in a number of ways. */
f12144dd 2116 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2117 if (defattrs->size_known_p)
2118 {
2119 attrs.size_known_p = true;
2120 attrs.size = defattrs->size;
2121 }
2122 else if (attrs.size_known_p)
2123 attrs.size -= offset;
10b76d73 2124
f12144dd 2125 set_mem_attrs (new_rtx, &attrs);
2126
2127 /* At some point, we should validate that this offset is within the object,
2128 if all the appropriate values are known. */
60564289 2129 return new_rtx;
2130}
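/* Illustrative sketch (editorial addition; callers normally reach
   adjust_address_1 through the adjust_address and adjust_address_nv
   macros in rtl.h): splitting a DImode reference into its two SImode
   words, assuming 4-byte words:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, 4);

   Both halves keep MEM_EXPR and the alias set, while MEM_OFFSET,
   MEM_SIZE and MEM_ALIGN are updated as computed above.  */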
2131
2132/* Return a memory reference like MEMREF, but with its mode changed
2133 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2134 MEMREF offset by OFFSET bytes. If VALIDATE is
2135 nonzero, the memory address is forced to be valid. */
2136
2137rtx
2138adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2139 HOST_WIDE_INT offset, int validate)
2140{
2141 memref = change_address_1 (memref, VOIDmode, addr, validate);
2142 return adjust_address_1 (memref, mode, offset, validate, 0);
2143}
2144
2145/* Return a memory reference like MEMREF, but whose address is changed by
2146 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2147 known to be in OFFSET (possibly 1). */
2148
2149rtx
502b8322 2150offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2151{
60564289 2152 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2153 enum machine_mode address_mode;
754c3d5d 2154 struct mem_attrs attrs, *defattrs;
e3c8ea67 2155
2156 attrs = *get_mem_attrs (memref);
2157 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
d4ebfa65 2158 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2159
68252e27 2160 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2161 could have secondary memory references, multiplies or anything.
2162
2163 However, if we did go and rearrange things, we can wind up not
2164 being able to recognize the magic around pic_offset_table_rtx.
2165 This stuff is fragile, and is yet another example of why it is
2166 bad to expose PIC machinery too early. */
2167 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2168 attrs.addrspace)
2169 && GET_CODE (addr) == PLUS
2170 && XEXP (addr, 0) == pic_offset_table_rtx)
2171 {
2172 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2173 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2174 }
2175
2176 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2177 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2178
fdb1c7b3 2179 /* If there are no changes, just return the original memory reference. */
2180 if (new_rtx == memref)
2181 return new_rtx;
fdb1c7b3 2182
2183 /* Update the alignment to reflect the offset. Reset the offset, which
2184 we don't know. */
2185 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2186 attrs.offset_known_p = false;
2187 attrs.size_known_p = defattrs->size_known_p;
2188 attrs.size = defattrs->size;
2189 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2190 set_mem_attrs (new_rtx, &attrs);
60564289 2191 return new_rtx;
0d4903b8 2192}
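/* Illustrative sketch (editorial addition): offset_address handles
   variable offsets.  Assuming IDX is a register already holding a byte
   offset that is known to be a multiple of 4:

     rtx elt = offset_address (array_mem, idx, 4);

   Passing POW2 == 4 lets the code above retain up to 32-bit alignment
   on the result even though the exact offset is not known.  */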
68252e27 2193
2194/* Return a memory reference like MEMREF, but with its address changed to
2195 ADDR. The caller is asserting that the actual piece of memory pointed
2196 to is the same, just the form of the address is being changed, such as
2197 by putting something into a register. */
2198
2199rtx
502b8322 2200replace_equiv_address (rtx memref, rtx addr)
792760b9 2201{
2202 /* change_address_1 copies the memory attribute structure without change
2203 and that's exactly what we want here. */
40c0668b 2204 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2205 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2206}
738cc472 2207
2208/* Likewise, but the reference is not required to be valid. */
2209
2210rtx
502b8322 2211replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2212{
2213 return change_address_1 (memref, VOIDmode, addr, 0);
2214}
2215
2216/* Return a memory reference like MEMREF, but with its mode widened to
2217 MODE and offset by OFFSET. This would be used by targets that e.g.
2218 cannot issue QImode memory operations and have to use SImode memory
2219 operations plus masking logic. */
2220
2221rtx
502b8322 2222widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2223{
60564289 2224 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
f12144dd 2225 struct mem_attrs attrs;
2226 unsigned int size = GET_MODE_SIZE (mode);
2227
fdb1c7b3 2228 /* If there are no changes, just return the original memory reference. */
2229 if (new_rtx == memref)
2230 return new_rtx;
fdb1c7b3 2231
2232 attrs = *get_mem_attrs (new_rtx);
2233
2234 /* If we don't know what offset we were at within the expression, then
2235 we can't know if we've overstepped the bounds. */
754c3d5d 2236 if (! attrs.offset_known_p)
f12144dd 2237 attrs.expr = NULL_TREE;
e7dfe4bb 2238
f12144dd 2239 while (attrs.expr)
e7dfe4bb 2240 {
f12144dd 2241 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2242 {
2243 tree field = TREE_OPERAND (attrs.expr, 1);
2244 tree offset = component_ref_field_offset (attrs.expr);
2245
2246 if (! DECL_SIZE_UNIT (field))
2247 {
f12144dd 2248 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2249 break;
2250 }
2251
2252 /* Is the field at least as large as the access? If so, ok,
2253 otherwise strip back to the containing structure. */
2254 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2255 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2256 && attrs.offset >= 0)
2257 break;
2258
44de5aeb 2259 if (! host_integerp (offset, 1))
e7dfe4bb 2260 {
f12144dd 2261 attrs.expr = NULL_TREE;
2262 break;
2263 }
2264
f12144dd 2265 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2266 attrs.offset += tree_low_cst (offset, 1);
2267 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2268 / BITS_PER_UNIT);
2269 }
2270 /* Similarly for the decl. */
2271 else if (DECL_P (attrs.expr)
2272 && DECL_SIZE_UNIT (attrs.expr)
2273 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2274 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2275 && (! attrs.offset_known_p || attrs.offset >= 0))
2276 break;
2277 else
2278 {
2279 /* The widened memory access overflows the expression, which means
2280 that it could alias another expression. Zap it. */
f12144dd 2281 attrs.expr = NULL_TREE;
2282 break;
2283 }
2284 }
2285
f12144dd 2286 if (! attrs.expr)
754c3d5d 2287 attrs.offset_known_p = false;
2288
2289 /* The widened memory may alias other stuff, so zap the alias set. */
2290 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2291 attrs.alias = 0;
2292 attrs.size_known_p = true;
2293 attrs.size = size;
f12144dd 2294 set_mem_attrs (new_rtx, &attrs);
60564289 2295 return new_rtx;
e7dfe4bb 2296}
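/* Illustrative sketch (editorial addition): a target without QImode
   loads might widen a byte reference to a full word and mask out the
   byte it needs:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   Per the loop above, the widened MEM keeps MEM_EXPR only while the
   access provably stays inside the original object, and its alias set
   is zapped because the wider access may overlap other objects.  */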
23b2ce53 2297\f
2298/* A fake decl that is used as the MEM_EXPR of spill slots. */
2299static GTY(()) tree spill_slot_decl;
2300
2301tree
2302get_spill_slot_decl (bool force_build_p)
2303{
2304 tree d = spill_slot_decl;
2305 rtx rd;
f12144dd 2306 struct mem_attrs attrs;
f6129d66 2307
3d7e23f6 2308 if (d || !force_build_p)
2309 return d;
2310
2311 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2312 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2313 DECL_ARTIFICIAL (d) = 1;
2314 DECL_IGNORED_P (d) = 1;
2315 TREE_USED (d) = 1;
2316 spill_slot_decl = d;
2317
2318 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2319 MEM_NOTRAP_P (rd) = 1;
2320 attrs = *mode_mem_attrs[(int) BLKmode];
2321 attrs.alias = new_alias_set ();
2322 attrs.expr = d;
2323 set_mem_attrs (rd, &attrs);
2324 SET_DECL_RTL (d, rd);
2325
2326 return d;
2327}
2328
2329/* Given MEM, a result from assign_stack_local, fill in the memory
2330 attributes as appropriate for a register allocator spill slot.
2331 These slots are not aliasable by other memory. We arrange for
2332 them all to use a single MEM_EXPR, so that the aliasing code can
2333 work properly in the case of shared spill slots. */
2334
2335void
2336set_mem_attrs_for_spill (rtx mem)
2337{
2338 struct mem_attrs attrs;
2339 rtx addr;
f6129d66 2340
2341 attrs = *get_mem_attrs (mem);
2342 attrs.expr = get_spill_slot_decl (true);
2343 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2344 attrs.addrspace = ADDR_SPACE_GENERIC;
2345
2346 /* We expect the incoming memory to be of the form:
2347 (mem:MODE (plus (reg sfp) (const_int offset)))
2348 with perhaps the plus missing for offset = 0. */
2349 addr = XEXP (mem, 0);
2350 attrs.offset_known_p = true;
2351 attrs.offset = 0;
f6129d66 2352 if (GET_CODE (addr) == PLUS
481683e1 2353 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2354 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2355
f12144dd 2356 set_mem_attrs (mem, &attrs);
2357 MEM_NOTRAP_P (mem) = 1;
2358}
2359\f
2360/* Return a newly created CODE_LABEL rtx with a unique label number. */
2361
2362rtx
502b8322 2363gen_label_rtx (void)
23b2ce53 2364{
0dc36574 2365 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2366 NULL, label_num++, NULL);
2367}
2368\f
2369/* For procedure integration. */
2370
23b2ce53 2371/* Install new pointers to the first and last insns in the chain.
86fe05e0 2372 Also, set cur_insn_uid to one higher than the last in use.
2373 Used for an inline-procedure after copying the insn chain. */
2374
2375void
502b8322 2376set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2377{
2378 rtx insn;
2379
2380 set_first_insn (first);
2381 set_last_insn (last);
2382 cur_insn_uid = 0;
2383
2384 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2385 {
2386 int debug_count = 0;
2387
2388 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2389 cur_debug_insn_uid = 0;
2390
2391 for (insn = first; insn; insn = NEXT_INSN (insn))
2392 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2393 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2394 else
2395 {
2396 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2397 if (DEBUG_INSN_P (insn))
2398 debug_count++;
2399 }
2400
2401 if (debug_count)
2402 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2403 else
2404 cur_debug_insn_uid++;
2405 }
2406 else
2407 for (insn = first; insn; insn = NEXT_INSN (insn))
2408 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2409
2410 cur_insn_uid++;
23b2ce53 2411}
23b2ce53 2412\f
750c9258 2413/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2414 structure. This routine should only be called once. */
23b2ce53 2415
fd743bc1 2416static void
b4aaa77b 2417unshare_all_rtl_1 (rtx insn)
23b2ce53 2418{
d1b81779 2419 /* Unshare just about everything else. */
2c07f13b 2420 unshare_all_rtl_in_chain (insn);
750c9258 2421
2422 /* Make sure the addresses of stack slots found outside the insn chain
2423 (such as, in DECL_RTL of a variable) are not shared
2424 with the insn chain.
2425
2426 This special care is necessary when the stack slot MEM does not
2427 actually appear in the insn chain. If it does appear, its address
2428 is unshared from all else at that point. */
242b0ce6 2429 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2430}
2431
750c9258 2432/* Go through all the RTL insn bodies and copy any invalid shared
2433 structure, again. This is a fairly expensive thing to do so it
2434 should be done sparingly. */
2435
2436void
502b8322 2437unshare_all_rtl_again (rtx insn)
2438{
2439 rtx p;
2440 tree decl;
2441
d1b81779 2442 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2443 if (INSN_P (p))
2444 {
2445 reset_used_flags (PATTERN (p));
2446 reset_used_flags (REG_NOTES (p));
d1b81779 2447 }
624c87aa 2448
2d4aecb3 2449 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2450 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2451
624c87aa 2452 /* Make sure that virtual parameters are not shared. */
910ad8de 2453 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2454 set_used_flags (DECL_RTL (decl));
2455
2456 reset_used_flags (stack_slot_list);
2457
b4aaa77b 2458 unshare_all_rtl_1 (insn);
2459}
2460
c2924966 2461unsigned int
2462unshare_all_rtl (void)
2463{
b4aaa77b 2464 unshare_all_rtl_1 (get_insns ());
c2924966 2465 return 0;
2466}
2467
8ddbbcae 2468struct rtl_opt_pass pass_unshare_all_rtl =
ef330312 2469{
2470 {
2471 RTL_PASS,
defb77dc 2472 "unshare", /* name */
2473 NULL, /* gate */
2474 unshare_all_rtl, /* execute */
2475 NULL, /* sub */
2476 NULL, /* next */
2477 0, /* static_pass_number */
7072a650 2478 TV_NONE, /* tv_id */
2479 0, /* properties_required */
2480 0, /* properties_provided */
2481 0, /* properties_destroyed */
2482 0, /* todo_flags_start */
22c5fa5f 2483 TODO_verify_rtl_sharing /* todo_flags_finish */
8ddbbcae 2484 }
2485};
2486
2487
2488/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2489 Recursively does the same for subexpressions. */
2490
2491static void
2492verify_rtx_sharing (rtx orig, rtx insn)
2493{
2494 rtx x = orig;
2495 int i;
2496 enum rtx_code code;
2497 const char *format_ptr;
2498
2499 if (x == 0)
2500 return;
2501
2502 code = GET_CODE (x);
2503
2504 /* These types may be freely shared. */
2505
2506 switch (code)
2507 {
2508 case REG:
2509 case DEBUG_EXPR:
2510 case VALUE:
2511 case CONST_INT:
2512 case CONST_DOUBLE:
091a3ac7 2513 case CONST_FIXED:
2514 case CONST_VECTOR:
2515 case SYMBOL_REF:
2516 case LABEL_REF:
2517 case CODE_LABEL:
2518 case PC:
2519 case CC0:
3810076b 2520 case RETURN:
2c07f13b 2521 case SCRATCH:
2c07f13b 2522 return;
2523 /* A CLOBBER of a hard register may be freely shared. */
2524 case CLOBBER:
2525 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2526 return;
2527 break;
2528
2529 case CONST:
6fb5fa3c 2530 if (shared_const_p (orig))
2c07f13b
JH
2531 return;
2532 break;
2533
2534 case MEM:
2535 /* A MEM is allowed to be shared if its address is constant. */
2536 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2537 || reload_completed || reload_in_progress)
2538 return;
2539
2540 break;
2541
2542 default:
2543 break;
2544 }
2545
2546 /* This rtx may not be shared. If it has already been seen,
2547 replace it with a copy of itself. */
1a2caa7a 2548#ifdef ENABLE_CHECKING
2c07f13b
JH
2549 if (RTX_FLAG (x, used))
2550 {
ab532386 2551 error ("invalid rtl sharing found in the insn");
2c07f13b 2552 debug_rtx (insn);
ab532386 2553 error ("shared rtx");
2c07f13b 2554 debug_rtx (x);
ab532386 2555 internal_error ("internal consistency failure");
2c07f13b 2556 }
2557#endif
2558 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2559
2560 RTX_FLAG (x, used) = 1;
2561
6614fd40 2562 /* Now scan the subexpressions recursively. */
2563
2564 format_ptr = GET_RTX_FORMAT (code);
2565
2566 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2567 {
2568 switch (*format_ptr++)
2569 {
2570 case 'e':
2571 verify_rtx_sharing (XEXP (x, i), insn);
2572 break;
2573
2574 case 'E':
2575 if (XVEC (x, i) != NULL)
2576 {
2577 int j;
2578 int len = XVECLEN (x, i);
2579
2580 for (j = 0; j < len; j++)
2581 {
2582 /* We allow sharing of ASM_OPERANDS inside a single
2583 instruction. */
2c07f13b 2584 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2585 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2586 == ASM_OPERANDS))
2587 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2588 else
2589 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2590 }
2591 }
2592 break;
2593 }
2594 }
2595 return;
2596}
2597
ba228239 2598/* Go through all the RTL insn bodies and check that there is no unexpected
2599 sharing in between the subexpressions. */
2600
24e47c76 2601DEBUG_FUNCTION void
2602verify_rtl_sharing (void)
2603{
2604 rtx p;
2605
2606 timevar_push (TV_VERIFY_RTL_SHARING);
2607
2608 for (p = get_insns (); p; p = NEXT_INSN (p))
2609 if (INSN_P (p))
2610 {
2611 reset_used_flags (PATTERN (p));
2612 reset_used_flags (REG_NOTES (p));
2613 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2614 {
2615 int i;
2616 rtx q, sequence = PATTERN (p);
2617
2618 for (i = 0; i < XVECLEN (sequence, 0); i++)
2619 {
2620 q = XVECEXP (sequence, 0, i);
2621 gcc_assert (INSN_P (q));
2622 reset_used_flags (PATTERN (q));
2623 reset_used_flags (REG_NOTES (q));
2624 }
2625 }
2626 }
2627
2628 for (p = get_insns (); p; p = NEXT_INSN (p))
2629 if (INSN_P (p))
2630 {
2631 verify_rtx_sharing (PATTERN (p), p);
2632 verify_rtx_sharing (REG_NOTES (p), p);
2c07f13b 2633 }
2634
2635 timevar_pop (TV_VERIFY_RTL_SHARING);
2636}
2637
2638/* Go through all the RTL insn bodies and copy any invalid shared structure.
2639 Assumes the mark bits are cleared at entry. */
2640
2641void
2642unshare_all_rtl_in_chain (rtx insn)
2643{
2644 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2645 if (INSN_P (insn))
2646 {
2647 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2648 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2649 }
2650}
2651
2d4aecb3 2652/* Go through all virtual stack slots of a function and mark them as
2653 shared. We never replace the DECL_RTLs themselves with a copy,
2654 but expressions mentioned in a DECL_RTL cannot be shared with
2655 expressions in the instruction stream.
2656
2657 Note that reload may convert pseudo registers into memories in-place.
2658 Pseudo registers are always shared, but MEMs never are. Thus if we
2659 reset the used flags on MEMs in the instruction stream, we must set
2660 them again on MEMs that appear in DECL_RTLs. */
2661
2d4aecb3 2662static void
5eb2a9f2 2663set_used_decls (tree blk)
2664{
2665 tree t;
2666
2667 /* Mark decls. */
910ad8de 2668 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2669 if (DECL_RTL_SET_P (t))
5eb2a9f2 2670 set_used_flags (DECL_RTL (t));
2671
2672 /* Now process sub-blocks. */
87caf699 2673 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2674 set_used_decls (t);
2675}
2676
23b2ce53 2677/* Mark ORIG as in use, and return a copy of it if it was already in use.
2678 Recursively does the same for subexpressions. Uses
2679 copy_rtx_if_shared_1 to reduce stack space. */
2680
2681rtx
502b8322 2682copy_rtx_if_shared (rtx orig)
23b2ce53 2683{
2684 copy_rtx_if_shared_1 (&orig);
2685 return orig;
2686}
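/* Illustrative sketch (editorial addition): the used-bit protocol
   requires the flags to be cleared before copies are requested, which
   is what unshare_all_rtl_again above does for each insn:

     reset_used_flags (PATTERN (insn));
     ...
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first traversal clears the per-rtx `used' bits; the second marks
   each rtx and replaces anything encountered twice with a shallow
   copy.  */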
2687
2688/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2689 use. Recursively does the same for subexpressions. */
2690
2691static void
2692copy_rtx_if_shared_1 (rtx *orig1)
2693{
2694 rtx x;
2695 int i;
2696 enum rtx_code code;
32b32b16 2697 rtx *last_ptr;
b3694847 2698 const char *format_ptr;
23b2ce53 2699 int copied = 0;
2700 int length;
2701
2702 /* Repeat is used to turn tail-recursion into iteration. */
2703repeat:
2704 x = *orig1;
2705
2706 if (x == 0)
32b32b16 2707 return;
2708
2709 code = GET_CODE (x);
2710
2711 /* These types may be freely shared. */
2712
2713 switch (code)
2714 {
2715 case REG:
2716 case DEBUG_EXPR:
2717 case VALUE:
2718 case CONST_INT:
2719 case CONST_DOUBLE:
091a3ac7 2720 case CONST_FIXED:
69ef87e2 2721 case CONST_VECTOR:
23b2ce53 2722 case SYMBOL_REF:
2c07f13b 2723 case LABEL_REF:
2724 case CODE_LABEL:
2725 case PC:
2726 case CC0:
2727 case SCRATCH:
0f41302f 2728 /* SCRATCH must be shared because each instance represents a distinct value. */
32b32b16 2729 return;
2730 case CLOBBER:
2731 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2732 return;
2733 break;
23b2ce53 2734
32b32b16 2737 return;
b851ea09
RK
2738 break;
2739
b5b8b0ac 2740 case DEBUG_INSN:
23b2ce53
RS
2741 case INSN:
2742 case JUMP_INSN:
2743 case CALL_INSN:
2744 case NOTE:
2745 case BARRIER:
2746 /* The chain of insns is not being copied. */
32b32b16 2747 return;
23b2ce53 2748
2749 default:
2750 break;
2751 }
2752
2753 /* This rtx may not be shared. If it has already been seen,
2754 replace it with a copy of itself. */
2755
2adc7f12 2756 if (RTX_FLAG (x, used))
23b2ce53 2757 {
aacd3885 2758 x = shallow_copy_rtx (x);
23b2ce53
RS
2759 copied = 1;
2760 }
2adc7f12 2761 RTX_FLAG (x, used) = 1;
2762
2763 /* Now scan the subexpressions recursively.
2764 We can store any replaced subexpressions directly into X
2765 since we know X is not shared! Any vectors in X
2766 must be copied if X was copied. */
2767
2768 format_ptr = GET_RTX_FORMAT (code);
2769 length = GET_RTX_LENGTH (code);
2770 last_ptr = NULL;
b8698a0f 2771
32b32b16 2772 for (i = 0; i < length; i++)
2773 {
2774 switch (*format_ptr++)
2775 {
2776 case 'e':
2777 if (last_ptr)
2778 copy_rtx_if_shared_1 (last_ptr);
2779 last_ptr = &XEXP (x, i);
2780 break;
2781
2782 case 'E':
2783 if (XVEC (x, i) != NULL)
2784 {
b3694847 2785 int j;
f0722107 2786 int len = XVECLEN (x, i);
b8698a0f 2787
2788 /* Copy the vector iff I copied the rtx and the length
2789 is nonzero. */
f0722107 2790 if (copied && len > 0)
8f985ec4 2791 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2792
5d3cc252 2793 /* Call recursively on everything inside the vector. */
f0722107 2794 for (j = 0; j < len; j++)
2795 {
2796 if (last_ptr)
2797 copy_rtx_if_shared_1 (last_ptr);
2798 last_ptr = &XVECEXP (x, i, j);
2799 }
2800 }
2801 break;
2802 }
2803 }
2804 *orig1 = x;
2805 if (last_ptr)
2806 {
2807 orig1 = last_ptr;
2808 goto repeat;
2809 }
2810 return;
2811}
2812
76369a82 2813/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 2814
2815static void
2816mark_used_flags (rtx x, int flag)
23b2ce53 2817{
2818 int i, j;
2819 enum rtx_code code;
2820 const char *format_ptr;
32b32b16 2821 int length;
23b2ce53 2822
2823 /* Repeat is used to turn tail-recursion into iteration. */
2824repeat:
2825 if (x == 0)
2826 return;
2827
2828 code = GET_CODE (x);
2829
9faa82d8 2830 /* These types may be freely shared so we needn't do any resetting
2831 for them. */
2832
2833 switch (code)
2834 {
2835 case REG:
2836 case DEBUG_EXPR:
2837 case VALUE:
2838 case CONST_INT:
2839 case CONST_DOUBLE:
091a3ac7 2840 case CONST_FIXED:
69ef87e2 2841 case CONST_VECTOR:
2842 case SYMBOL_REF:
2843 case CODE_LABEL:
2844 case PC:
2845 case CC0:
2846 return;
2847
b5b8b0ac 2848 case DEBUG_INSN:
23b2ce53
RS
2849 case INSN:
2850 case JUMP_INSN:
2851 case CALL_INSN:
2852 case NOTE:
2853 case LABEL_REF:
2854 case BARRIER:
2855 /* The chain of insns is not being copied. */
2856 return;
750c9258 2857
2858 default:
2859 break;
2860 }
2861
76369a82 2862 RTX_FLAG (x, used) = flag;
2863
2864 format_ptr = GET_RTX_FORMAT (code);
32b32b16 2865 length = GET_RTX_LENGTH (code);
b8698a0f 2866
32b32b16 2867 for (i = 0; i < length; i++)
2868 {
2869 switch (*format_ptr++)
2870 {
2871 case 'e':
2872 if (i == length-1)
2873 {
2874 x = XEXP (x, i);
2875 goto repeat;
2876 }
76369a82 2877 mark_used_flags (XEXP (x, i), flag);
2878 break;
2879
2880 case 'E':
2881 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 2882 mark_used_flags (XVECEXP (x, i, j), flag);
2883 break;
2884 }
2885 }
2886}
2c07f13b 2887
76369a82 2888/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2889 to look for shared sub-parts. */
2890
2891void
76369a82 2892reset_used_flags (rtx x)
2c07f13b 2893{
2894 mark_used_flags (x, 0);
2895}
2c07f13b 2896
2897/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2898 to look for shared sub-parts. */
2c07f13b 2899
2900void
2901set_used_flags (rtx x)
2902{
2903 mark_used_flags (x, 1);
2c07f13b 2904}
2905\f
2906/* Copy X if necessary so that it won't be altered by changes in OTHER.
2907 Return X or the rtx for the pseudo reg the value of X was copied into.
2908 OTHER must be valid as a SET_DEST. */
2909
2910rtx
502b8322 2911make_safe_from (rtx x, rtx other)
2912{
2913 while (1)
2914 switch (GET_CODE (other))
2915 {
2916 case SUBREG:
2917 other = SUBREG_REG (other);
2918 break;
2919 case STRICT_LOW_PART:
2920 case SIGN_EXTEND:
2921 case ZERO_EXTEND:
2922 other = XEXP (other, 0);
2923 break;
2924 default:
2925 goto done;
2926 }
2927 done:
3c0cb5de 2928 if ((MEM_P (other)
23b2ce53 2929 && ! CONSTANT_P (x)
f8cfc6aa 2930 && !REG_P (x)
23b2ce53 2931 && GET_CODE (x) != SUBREG)
f8cfc6aa 2932 || (REG_P (other)
2933 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2934 || reg_mentioned_p (other, x))))
2935 {
2936 rtx temp = gen_reg_rtx (GET_MODE (x));
2937 emit_move_insn (temp, x);
2938 return temp;
2939 }
2940 return x;
2941}
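/* Illustrative sketch (editorial addition): before emitting code that
   stores into TARGET and afterwards reads VAL, an expander can protect
   VAL like this (both are assumed to be valid rtxen of suitable modes):

     val = make_safe_from (val, target);

   If VAL mentions TARGET, or TARGET is a MEM and VAL is complex enough
   that the store might change it, the code above first copies VAL into
   a fresh pseudo register.  */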
2942\f
2943/* Emission of insns (adding them to the doubly-linked list). */
2944
2945/* Return the last insn emitted, even if it is in a sequence now pushed. */
2946
2947rtx
502b8322 2948get_last_insn_anywhere (void)
2949{
2950 struct sequence_stack *stack;
2951 if (get_last_insn ())
2952 return get_last_insn ();
49ad7cfa 2953 for (stack = seq_stack; stack; stack = stack->next)
2954 if (stack->last != 0)
2955 return stack->last;
2956 return 0;
2957}
2958
2959/* Return the first nonnote insn emitted in current sequence or current
2960 function. This routine looks inside SEQUENCEs. */
2961
2962rtx
502b8322 2963get_first_nonnote_insn (void)
2a496e8b 2964{
5936d944 2965 rtx insn = get_insns ();
2966
2967 if (insn)
2968 {
2969 if (NOTE_P (insn))
2970 for (insn = next_insn (insn);
2971 insn && NOTE_P (insn);
2972 insn = next_insn (insn))
2973 continue;
2974 else
2975 {
2ca202e7 2976 if (NONJUMP_INSN_P (insn)
2977 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2978 insn = XVECEXP (PATTERN (insn), 0, 0);
2979 }
2980 }
2981
2982 return insn;
2983}
2984
2985/* Return the last nonnote insn emitted in current sequence or current
2986 function. This routine looks inside SEQUENCEs. */
2987
2988rtx
502b8322 2989get_last_nonnote_insn (void)
2a496e8b 2990{
5936d944 2991 rtx insn = get_last_insn ();
2992
2993 if (insn)
2994 {
2995 if (NOTE_P (insn))
2996 for (insn = previous_insn (insn);
2997 insn && NOTE_P (insn);
2998 insn = previous_insn (insn))
2999 continue;
3000 else
3001 {
2ca202e7 3002 if (NONJUMP_INSN_P (insn)
3003 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3004 insn = XVECEXP (PATTERN (insn), 0,
3005 XVECLEN (PATTERN (insn), 0) - 1);
3006 }
3007 }
3008
3009 return insn;
3010}
3011
3012/* Return the number of actual (non-debug) insns emitted in this
3013 function. */
3014
3015int
3016get_max_insn_count (void)
3017{
3018 int n = cur_insn_uid;
3019
3020 /* The table size must be stable across -g, to avoid codegen
3021 differences due to debug insns, and not be affected by
3022 -fmin-insn-uid, to avoid excessive table size and to simplify
3023 debugging of -fcompare-debug failures. */
3024 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3025 n -= cur_debug_insn_uid;
3026 else
3027 n -= MIN_NONDEBUG_INSN_UID;
3028
3029 return n;
3030}
3031
3032\f
3033/* Return the next insn. If it is a SEQUENCE, return the first insn
3034 of the sequence. */
3035
3036rtx
502b8322 3037next_insn (rtx insn)
23b2ce53 3038{
3039 if (insn)
3040 {
3041 insn = NEXT_INSN (insn);
3042 if (insn && NONJUMP_INSN_P (insn)
3043 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3044 insn = XVECEXP (PATTERN (insn), 0, 0);
3045 }
23b2ce53 3046
75547801 3047 return insn;
3048}
3049
3050/* Return the previous insn. If it is a SEQUENCE, return the last insn
3051 of the sequence. */
3052
3053rtx
502b8322 3054previous_insn (rtx insn)
23b2ce53 3055{
3056 if (insn)
3057 {
3058 insn = PREV_INSN (insn);
3059 if (insn && NONJUMP_INSN_P (insn)
3060 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3061 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3062 }
23b2ce53 3063
75547801 3064 return insn;
3065}
3066
3067/* Return the next insn after INSN that is not a NOTE. This routine does not
3068 look inside SEQUENCEs. */
3069
3070rtx
502b8322 3071next_nonnote_insn (rtx insn)
23b2ce53 3072{
3073 while (insn)
3074 {
3075 insn = NEXT_INSN (insn);
3076 if (insn == 0 || !NOTE_P (insn))
3077 break;
3078 }
23b2ce53 3079
75547801 3080 return insn;
3081}
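/* Illustrative sketch (editorial addition): a typical walk over the
   real insns of the current function using the iterator above; the
   extra NOTE_P test only matters for the very first insn, which
   get_insns () may return as a NOTE:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
       if (!NOTE_P (insn))
         ... process INSN ...  */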
3082
3083/* Return the next insn after INSN that is not a NOTE, but stop the
3084 search before we enter another basic block. This routine does not
3085 look inside SEQUENCEs. */
3086
3087rtx
3088next_nonnote_insn_bb (rtx insn)
3089{
3090 while (insn)
3091 {
3092 insn = NEXT_INSN (insn);
3093 if (insn == 0 || !NOTE_P (insn))
3094 break;
3095 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3096 return NULL_RTX;
3097 }
3098
3099 return insn;
3100}
3101
3102/* Return the previous insn before INSN that is not a NOTE. This routine does
3103 not look inside SEQUENCEs. */
3104
3105rtx
502b8322 3106prev_nonnote_insn (rtx insn)
23b2ce53 3107{
3108 while (insn)
3109 {
3110 insn = PREV_INSN (insn);
3111 if (insn == 0 || !NOTE_P (insn))
3112 break;
3113 }
23b2ce53 3114
75547801 3115 return insn;
3116}
3117
3118/* Return the previous insn before INSN that is not a NOTE, but stop
3119 the search before we enter another basic block. This routine does
3120 not look inside SEQUENCEs. */
3121
3122rtx
3123prev_nonnote_insn_bb (rtx insn)
3124{
3125 while (insn)
3126 {
3127 insn = PREV_INSN (insn);
3128 if (insn == 0 || !NOTE_P (insn))
3129 break;
3130 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3131 return NULL_RTX;
3132 }
3133
3134 return insn;
3135}
3136
3137/* Return the next insn after INSN that is not a DEBUG_INSN. This
3138 routine does not look inside SEQUENCEs. */
3139
3140rtx
3141next_nondebug_insn (rtx insn)
3142{
3143 while (insn)
3144 {
3145 insn = NEXT_INSN (insn);
3146 if (insn == 0 || !DEBUG_INSN_P (insn))
3147 break;
3148 }
3149
3150 return insn;
3151}
3152
3153/* Return the previous insn before INSN that is not a DEBUG_INSN.
3154 This routine does not look inside SEQUENCEs. */
3155
3156rtx
3157prev_nondebug_insn (rtx insn)
3158{
3159 while (insn)
3160 {
3161 insn = PREV_INSN (insn);
3162 if (insn == 0 || !DEBUG_INSN_P (insn))
3163 break;
3164 }
3165
3166 return insn;
3167}
3168
3169/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3170 This routine does not look inside SEQUENCEs. */
3171
3172rtx
3173next_nonnote_nondebug_insn (rtx insn)
3174{
3175 while (insn)
3176 {
3177 insn = NEXT_INSN (insn);
3178 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3179 break;
3180 }
3181
3182 return insn;
3183}
3184
3185/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3186 This routine does not look inside SEQUENCEs. */
3187
3188rtx
3189prev_nonnote_nondebug_insn (rtx insn)
3190{
3191 while (insn)
3192 {
3193 insn = PREV_INSN (insn);
3194 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3195 break;
3196 }
3197
3198 return insn;
3199}
3200
3201/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3202 or 0, if there is none. This routine does not look inside
0f41302f 3203 SEQUENCEs. */
3204
3205rtx
502b8322 3206next_real_insn (rtx insn)
23b2ce53 3207{
3208 while (insn)
3209 {
3210 insn = NEXT_INSN (insn);
3211 if (insn == 0 || INSN_P (insn))
3212 break;
3213 }
23b2ce53 3214
75547801 3215 return insn;
3216}
3217
3218/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3219 or 0, if there is none. This routine does not look inside
3220 SEQUENCEs. */
3221
3222rtx
502b8322 3223prev_real_insn (rtx insn)
23b2ce53 3224{
3225 while (insn)
3226 {
3227 insn = PREV_INSN (insn);
3228 if (insn == 0 || INSN_P (insn))
3229 break;
3230 }
23b2ce53 3231
75547801 3232 return insn;
3233}
3234
3235/* Return the last CALL_INSN in the current list, or 0 if there is none.
3236 This routine does not look inside SEQUENCEs. */
3237
3238rtx
502b8322 3239last_call_insn (void)
3240{
3241 rtx insn;
3242
3243 for (insn = get_last_insn ();
4b4bf941 3244 insn && !CALL_P (insn);
3245 insn = PREV_INSN (insn))
3246 ;
3247
3248 return insn;
3249}
3250
23b2ce53 3251/* Find the next insn after INSN that really does something. This routine
3252 does not look inside SEQUENCEs. After reload this also skips over
3253 standalone USE and CLOBBER insns. */
23b2ce53 3254
69732dcb 3255int
4f588890 3256active_insn_p (const_rtx insn)
69732dcb 3257{
3258 return (CALL_P (insn) || JUMP_P (insn)
3259 || (NONJUMP_INSN_P (insn)
3260 && (! reload_completed
3261 || (GET_CODE (PATTERN (insn)) != USE
3262 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3263}
3264
23b2ce53 3265rtx
502b8322 3266next_active_insn (rtx insn)
23b2ce53 3267{
3268 while (insn)
3269 {
3270 insn = NEXT_INSN (insn);
3271 if (insn == 0 || active_insn_p (insn))
3272 break;
3273 }
23b2ce53 3274
75547801 3275 return insn;
3276}
3277
3278/* Find the last insn before INSN that really does something. This routine
3279 does not look inside SEQUENCEs. After reload this also skips over
3280 standalone USE and CLOBBER insns. */
3281
3282rtx
502b8322 3283prev_active_insn (rtx insn)
23b2ce53 3284{
3285 while (insn)
3286 {
3287 insn = PREV_INSN (insn);
3288 if (insn == 0 || active_insn_p (insn))
3289 break;
3290 }
23b2ce53 3291
75547801 3292 return insn;
3293}
3294
3295/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3296
3297rtx
502b8322 3298next_label (rtx insn)
23b2ce53 3299{
3300 while (insn)
3301 {
3302 insn = NEXT_INSN (insn);
3303 if (insn == 0 || LABEL_P (insn))
3304 break;
3305 }
23b2ce53 3306
75547801 3307 return insn;
3308}
3309
3310/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3311
3312rtx
502b8322 3313prev_label (rtx insn)
23b2ce53 3314{
3315 while (insn)
3316 {
3317 insn = PREV_INSN (insn);
3318 if (insn == 0 || LABEL_P (insn))
3319 break;
3320 }
23b2ce53 3321
75547801 3322 return insn;
23b2ce53 3323}
6c2511d3 3324
3325/* Return the last label to mark the same position as LABEL. Return LABEL
3326 itself if it is null or any return rtx. */
3327
3328rtx
3329skip_consecutive_labels (rtx label)
3330{
3331 rtx insn;
3332
3333 if (label && ANY_RETURN_P (label))
3334 return label;
3335
3336 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3337 if (LABEL_P (insn))
3338 label = insn;
3339
3340 return label;
3341}
3342\f
3343#ifdef HAVE_cc0
3344/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3345 and REG_CC_USER notes so we can find it. */
3346
3347void
502b8322 3348link_cc0_insns (rtx insn)
3349{
3350 rtx user = next_nonnote_insn (insn);
3351
4b4bf941 3352 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
c572e5ba
JVA
3353 user = XVECEXP (PATTERN (user), 0, 0);
3354
3355 add_reg_note (user, REG_CC_SETTER, insn);
3356 add_reg_note (insn, REG_CC_USER, user);
3357}
3358
3359/* Return the next insn that uses CC0 after INSN, which is assumed to
3360 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3361 applied to the result of this function should yield INSN).
3362
3363 Normally, this is simply the next insn. However, if a REG_CC_USER note
3364 is present, it contains the insn that uses CC0.
3365
3366 Return 0 if we can't find the insn. */
3367
3368rtx
502b8322 3369next_cc0_user (rtx insn)
23b2ce53 3370{
906c4e36 3371 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3372
3373 if (note)
3374 return XEXP (note, 0);
3375
3376 insn = next_nonnote_insn (insn);
4b4bf941 3377 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3378 insn = XVECEXP (PATTERN (insn), 0, 0);
3379
2c3c49de 3380 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3381 return insn;
3382
3383 return 0;
3384}
3385
3386/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3387 note, it is the previous insn. */
3388
3389rtx
502b8322 3390prev_cc0_setter (rtx insn)
23b2ce53 3391{
906c4e36 3392 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3393
3394 if (note)
3395 return XEXP (note, 0);
3396
3397 insn = prev_nonnote_insn (insn);
5b0264cb 3398 gcc_assert (sets_cc0_p (PATTERN (insn)));
3399
3400 return insn;
3401}
3402#endif
e5bef2e4 3403
3404#ifdef AUTO_INC_DEC
3405/* Find a RTX_AUTOINC class rtx which matches DATA. */
3406
3407static int
3408find_auto_inc (rtx *xp, void *data)
3409{
3410 rtx x = *xp;
5ead67f6 3411 rtx reg = (rtx) data;
3412
3413 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3414 return 0;
3415
3416 switch (GET_CODE (x))
3417 {
3418 case PRE_DEC:
3419 case PRE_INC:
3420 case POST_DEC:
3421 case POST_INC:
3422 case PRE_MODIFY:
3423 case POST_MODIFY:
3424 if (rtx_equal_p (reg, XEXP (x, 0)))
3425 return 1;
3426 break;
3427
3428 default:
3429 gcc_unreachable ();
3430 }
3431 return -1;
3432}
3433#endif
3434
e5bef2e4
HB
3435/* Increment the label uses for all labels present in rtx. */
3436
3437static void
502b8322 3438mark_label_nuses (rtx x)
e5bef2e4 3439{
b3694847
SS
3440 enum rtx_code code;
3441 int i, j;
3442 const char *fmt;
e5bef2e4
HB
3443
3444 code = GET_CODE (x);
7537fc90 3445 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3446 LABEL_NUSES (XEXP (x, 0))++;
3447
3448 fmt = GET_RTX_FORMAT (code);
3449 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3450 {
3451 if (fmt[i] == 'e')
0fb7aeda 3452 mark_label_nuses (XEXP (x, i));
e5bef2e4 3453 else if (fmt[i] == 'E')
0fb7aeda 3454 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3455 mark_label_nuses (XVECEXP (x, i, j));
3456 }
3457}
3458
23b2ce53
RS
3459\f
3460/* Try splitting insns that can be split for better scheduling.
3461 PAT is the pattern which might split.
3462 TRIAL is the insn providing PAT.
cc2902df 3463 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3464
3465 If this routine succeeds in splitting, it returns the first or last
11147ebe 3466 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3467 returns TRIAL. If the insn to be returned can be split, it will be. */
3468
3469rtx
502b8322 3470try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3471{
3472 rtx before = PREV_INSN (trial);
3473 rtx after = NEXT_INSN (trial);
23b2ce53 3474 int has_barrier = 0;
4a8cae83 3475 rtx note, seq, tem;
6b24c259 3476 int probability;
599aedd9
RH
3477 rtx insn_last, insn;
3478 int njumps = 0;
6b24c259 3479
cd9c1ca8
RH
3480 /* We're not good at redistributing frame information. */
3481 if (RTX_FRAME_RELATED_P (trial))
3482 return trial;
3483
6b24c259
JH
3484 if (any_condjump_p (trial)
3485 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3486 split_branch_probability = INTVAL (XEXP (note, 0));
3487 probability = split_branch_probability;
3488
3489 seq = split_insns (pat, trial);
3490
3491 split_branch_probability = -1;
23b2ce53
RS
3492
3493 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3494 We may need to handle this specially. */
4b4bf941 3495 if (after && BARRIER_P (after))
23b2ce53
RS
3496 {
3497 has_barrier = 1;
3498 after = NEXT_INSN (after);
3499 }
3500
599aedd9
RH
3501 if (!seq)
3502 return trial;
3503
3504 /* Avoid infinite loop if any insn of the result matches
3505 the original pattern. */
3506 insn_last = seq;
3507 while (1)
23b2ce53 3508 {
599aedd9
RH
3509 if (INSN_P (insn_last)
3510 && rtx_equal_p (PATTERN (insn_last), pat))
3511 return trial;
3512 if (!NEXT_INSN (insn_last))
3513 break;
3514 insn_last = NEXT_INSN (insn_last);
3515 }
750c9258 3516
6fb5fa3c
DB
3517 /* We will be adding the new sequence to the function. The splitters
3518 may have introduced invalid RTL sharing, so unshare the sequence now. */
3519 unshare_all_rtl_in_chain (seq);
3520
599aedd9
RH
3521 /* Mark labels. */
3522 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3523 {
4b4bf941 3524 if (JUMP_P (insn))
599aedd9
RH
3525 {
3526 mark_jump_label (PATTERN (insn), insn, 0);
3527 njumps++;
3528 if (probability != -1
3529 && any_condjump_p (insn)
3530 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3531 {
599aedd9
RH
3532 /* We can preserve the REG_BR_PROB notes only if exactly
3533 one jump is created, otherwise the machine description
 3534 is responsible for this step using the
3535 split_branch_probability variable. */
5b0264cb 3536 gcc_assert (njumps == 1);
65c5f2a6 3537 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
2f937369 3538 }
599aedd9
RH
3539 }
3540 }
3541
3542 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3543 in SEQ and copy any additional information across. */
4b4bf941 3544 if (CALL_P (trial))
599aedd9
RH
3545 {
3546 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3547 if (CALL_P (insn))
599aedd9 3548 {
65712d5c
RS
3549 rtx next, *p;
3550
3551 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3552 target may have explicitly specified. */
3553 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3554 while (*p)
3555 p = &XEXP (*p, 1);
3556 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3557
3558 /* If the old call was a sibling call, the new one must
3559 be too. */
599aedd9 3560 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3561
3562 /* If the new call is the last instruction in the sequence,
3563 it will effectively replace the old call in-situ. Otherwise
3564 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3565 so that it comes immediately after the new call. */
3566 if (NEXT_INSN (insn))
65f3dedb
RS
3567 for (next = NEXT_INSN (trial);
3568 next && NOTE_P (next);
3569 next = NEXT_INSN (next))
3570 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3571 {
3572 remove_insn (next);
3573 add_insn_after (next, insn, NULL);
65f3dedb 3574 break;
65712d5c 3575 }
599aedd9
RH
3576 }
3577 }
4b5e8abe 3578
599aedd9
RH
3579 /* Copy notes, particularly those related to the CFG. */
3580 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3581 {
3582 switch (REG_NOTE_KIND (note))
3583 {
3584 case REG_EH_REGION:
1d65f45c 3585 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3586 break;
216183ce 3587
599aedd9
RH
3588 case REG_NORETURN:
3589 case REG_SETJMP:
594f8779 3590 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3591 {
4b4bf941 3592 if (CALL_P (insn))
65c5f2a6 3593 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3594 }
599aedd9 3595 break;
d6e95df8 3596
599aedd9 3597 case REG_NON_LOCAL_GOTO:
594f8779 3598 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3599 {
4b4bf941 3600 if (JUMP_P (insn))
65c5f2a6 3601 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3602 }
599aedd9 3603 break;
e5bef2e4 3604
594f8779
RZ
3605#ifdef AUTO_INC_DEC
3606 case REG_INC:
3607 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3608 {
3609 rtx reg = XEXP (note, 0);
3610 if (!FIND_REG_INC_NOTE (insn, reg)
3611 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
65c5f2a6 3612 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3613 }
3614 break;
3615#endif
3616
599aedd9
RH
3617 default:
3618 break;
23b2ce53 3619 }
599aedd9
RH
3620 }
3621
3622 /* If there are LABELS inside the split insns increment the
3623 usage count so we don't delete the label. */
cf7c4aa6 3624 if (INSN_P (trial))
599aedd9
RH
3625 {
3626 insn = insn_last;
3627 while (insn != NULL_RTX)
23b2ce53 3628 {
cf7c4aa6 3629 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3630 if (NONJUMP_INSN_P (insn))
599aedd9 3631 mark_label_nuses (PATTERN (insn));
23b2ce53 3632
599aedd9
RH
3633 insn = PREV_INSN (insn);
3634 }
23b2ce53
RS
3635 }
3636
0435312e 3637 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3638
3639 delete_insn (trial);
3640 if (has_barrier)
3641 emit_barrier_after (tem);
3642
3643 /* Recursively call try_split for each new insn created; by the
3644 time control returns here that insn will be fully split, so
3645 set LAST and continue from the insn after the one returned.
3646 We can't use next_active_insn here since AFTER may be a note.
 3647 Ignore deleted insns, which can occur if not optimizing. */
3648 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3649 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3650 tem = try_split (PATTERN (tem), tem, 1);
3651
3652 /* Return either the first or the last insn, depending on which was
3653 requested. */
3654 return last
5936d944 3655 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3656 : NEXT_INSN (before);
23b2ce53
RS
 3657}

/* A minimal usage sketch (illustrative, not part of the original
   file): walk the current function and split everything that can be
   split, in the style of a split_all_insns-like pass:

     rtx insn, next;
     for (insn = get_insns (); insn; insn = next)
       {
         next = NEXT_INSN (insn);
         if (INSN_P (insn))
           try_split (PATTERN (insn), insn, 1);
       }

   Passing LAST == 1 asks for the last insn of any replacement
   sequence; try_split itself recurses over the new insns.  */
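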
3658\f
3659/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3660 Store PATTERN in the pattern slots. */
23b2ce53
RS
3661
3662rtx
502b8322 3663make_insn_raw (rtx pattern)
23b2ce53 3664{
b3694847 3665 rtx insn;
23b2ce53 3666
1f8f4a0b 3667 insn = rtx_alloc (INSN);
23b2ce53 3668
43127294 3669 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3670 PATTERN (insn) = pattern;
3671 INSN_CODE (insn) = -1;
1632afca 3672 REG_NOTES (insn) = NULL;
55e092c4 3673 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3674 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3675
47984720
NC
3676#ifdef ENABLE_RTL_CHECKING
3677 if (insn
2c3c49de 3678 && INSN_P (insn)
47984720
NC
3679 && (returnjump_p (insn)
3680 || (GET_CODE (insn) == SET
3681 && SET_DEST (insn) == pc_rtx)))
3682 {
d4ee4d25 3683 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3684 debug_rtx (insn);
3685 }
3686#endif
750c9258 3687
23b2ce53
RS
3688 return insn;
3689}
3690
b5b8b0ac
AO
3691/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3692
3693rtx
3694make_debug_insn_raw (rtx pattern)
3695{
3696 rtx insn;
3697
3698 insn = rtx_alloc (DEBUG_INSN);
3699 INSN_UID (insn) = cur_debug_insn_uid++;
3700 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3701 INSN_UID (insn) = cur_insn_uid++;
3702
3703 PATTERN (insn) = pattern;
3704 INSN_CODE (insn) = -1;
3705 REG_NOTES (insn) = NULL;
3706 INSN_LOCATOR (insn) = curr_insn_locator ();
3707 BLOCK_FOR_INSN (insn) = NULL;
3708
3709 return insn;
3710}
3711
2f937369 3712/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3713
38109dab 3714rtx
502b8322 3715make_jump_insn_raw (rtx pattern)
23b2ce53 3716{
b3694847 3717 rtx insn;
23b2ce53 3718
4b1f5e8c 3719 insn = rtx_alloc (JUMP_INSN);
1632afca 3720 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3721
3722 PATTERN (insn) = pattern;
3723 INSN_CODE (insn) = -1;
1632afca
RS
3724 REG_NOTES (insn) = NULL;
3725 JUMP_LABEL (insn) = NULL;
55e092c4 3726 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3727 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3728
3729 return insn;
3730}
aff507f4 3731
2f937369 3732/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3733
3734static rtx
502b8322 3735make_call_insn_raw (rtx pattern)
aff507f4 3736{
b3694847 3737 rtx insn;
aff507f4
RK
3738
3739 insn = rtx_alloc (CALL_INSN);
3740 INSN_UID (insn) = cur_insn_uid++;
3741
3742 PATTERN (insn) = pattern;
3743 INSN_CODE (insn) = -1;
aff507f4
RK
3744 REG_NOTES (insn) = NULL;
3745 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
55e092c4 3746 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3747 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3748
3749 return insn;
3750}
23b2ce53
RS
3751\f
3752/* Add INSN to the end of the doubly-linked list.
3753 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3754
3755void
502b8322 3756add_insn (rtx insn)
23b2ce53 3757{
5936d944 3758 PREV_INSN (insn) = get_last_insn();
23b2ce53
RS
3759 NEXT_INSN (insn) = 0;
3760
5936d944
JH
3761 if (NULL != get_last_insn())
3762 NEXT_INSN (get_last_insn ()) = insn;
23b2ce53 3763
5936d944
JH
3764 if (NULL == get_insns ())
3765 set_first_insn (insn);
23b2ce53 3766
5936d944 3767 set_last_insn (insn);
23b2ce53
RS
3768}
3769
a0ae8e8d
RK
3770/* Add INSN into the doubly-linked list after insn AFTER. This and
3771 the next should be the only functions called to insert an insn once
ba213285 3772 delay slots have been filled since only they know how to update a
a0ae8e8d 3773 SEQUENCE. */
23b2ce53
RS
3774
3775void
6fb5fa3c 3776add_insn_after (rtx insn, rtx after, basic_block bb)
23b2ce53
RS
3777{
3778 rtx next = NEXT_INSN (after);
3779
5b0264cb 3780 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3781
23b2ce53
RS
3782 NEXT_INSN (insn) = next;
3783 PREV_INSN (insn) = after;
3784
3785 if (next)
3786 {
3787 PREV_INSN (next) = insn;
4b4bf941 3788 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3789 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3790 }
5936d944
JH
3791 else if (get_last_insn () == after)
3792 set_last_insn (insn);
23b2ce53
RS
3793 else
3794 {
49ad7cfa 3795 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3796 /* Scan all pending sequences too. */
3797 for (; stack; stack = stack->next)
3798 if (after == stack->last)
fef0509b
RK
3799 {
3800 stack->last = insn;
3801 break;
3802 }
a0ae8e8d 3803
5b0264cb 3804 gcc_assert (stack);
23b2ce53
RS
3805 }
3806
4b4bf941
JQ
3807 if (!BARRIER_P (after)
3808 && !BARRIER_P (insn)
3c030e88
JH
3809 && (bb = BLOCK_FOR_INSN (after)))
3810 {
3811 set_block_for_insn (insn, bb);
38c1593d 3812 if (INSN_P (insn))
6fb5fa3c 3813 df_insn_rescan (insn);
3c030e88 3814 /* Should not happen as first in the BB is always
a1f300c0 3815 either NOTE or LABEL. */
a813c111 3816 if (BB_END (bb) == after
3c030e88 3817 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 3818 && !BARRIER_P (insn)
a38e7aa5 3819 && !NOTE_INSN_BASIC_BLOCK_P (insn))
a813c111 3820 BB_END (bb) = insn;
3c030e88
JH
3821 }
3822
23b2ce53 3823 NEXT_INSN (after) = insn;
4b4bf941 3824 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3825 {
3826 rtx sequence = PATTERN (after);
3827 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3828 }
3829}
3830
a0ae8e8d 3831/* Add INSN into the doubly-linked list before insn BEFORE. This and
6fb5fa3c
DB
3832 the previous should be the only functions called to insert an insn
3833 once delay slots have been filled since only they know how to
3834 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
 3835 bb from BEFORE. */
a0ae8e8d
RK
3836
3837void
6fb5fa3c 3838add_insn_before (rtx insn, rtx before, basic_block bb)
a0ae8e8d
RK
3839{
3840 rtx prev = PREV_INSN (before);
3841
5b0264cb 3842 gcc_assert (!optimize || !INSN_DELETED_P (before));
ba213285 3843
a0ae8e8d
RK
3844 PREV_INSN (insn) = prev;
3845 NEXT_INSN (insn) = before;
3846
3847 if (prev)
3848 {
3849 NEXT_INSN (prev) = insn;
4b4bf941 3850 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
a0ae8e8d
RK
3851 {
3852 rtx sequence = PATTERN (prev);
3853 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3854 }
3855 }
5936d944
JH
3856 else if (get_insns () == before)
3857 set_first_insn (insn);
a0ae8e8d
RK
3858 else
3859 {
49ad7cfa 3860 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3861 /* Scan all pending sequences too. */
3862 for (; stack; stack = stack->next)
3863 if (before == stack->first)
fef0509b
RK
3864 {
3865 stack->first = insn;
3866 break;
3867 }
a0ae8e8d 3868
5b0264cb 3869 gcc_assert (stack);
a0ae8e8d
RK
3870 }
3871
b8698a0f 3872 if (!bb
6fb5fa3c
DB
3873 && !BARRIER_P (before)
3874 && !BARRIER_P (insn))
3875 bb = BLOCK_FOR_INSN (before);
3876
3877 if (bb)
3c030e88
JH
3878 {
3879 set_block_for_insn (insn, bb);
38c1593d 3880 if (INSN_P (insn))
6fb5fa3c 3881 df_insn_rescan (insn);
5b0264cb 3882 /* Should not happen as first in the BB is always either NOTE or
43e05e45 3883 LABEL. */
5b0264cb
NS
3884 gcc_assert (BB_HEAD (bb) != insn
3885 /* Avoid clobbering of structure when creating new BB. */
3886 || BARRIER_P (insn)
a38e7aa5 3887 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88
JH
3888 }
3889
a0ae8e8d 3890 PREV_INSN (before) = insn;
4b4bf941 3891 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
a0ae8e8d
RK
3892 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3893}
3894
6fb5fa3c
DB
3895
 3896/* Replace insn with a deleted instruction note. */
3897
0ce2b299
EB
3898void
3899set_insn_deleted (rtx insn)
6fb5fa3c
DB
3900{
3901 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3902 PUT_CODE (insn, NOTE);
3903 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3904}
3905
3906
89e99eea
DB
3907/* Remove an insn from its doubly-linked list. This function knows how
3908 to handle sequences. */
3909void
502b8322 3910remove_insn (rtx insn)
89e99eea
DB
3911{
3912 rtx next = NEXT_INSN (insn);
3913 rtx prev = PREV_INSN (insn);
53c17031
JH
3914 basic_block bb;
3915
6fb5fa3c
DB
3916 /* Later in the code, the block will be marked dirty. */
3917 df_insn_delete (NULL, INSN_UID (insn));
3918
89e99eea
DB
3919 if (prev)
3920 {
3921 NEXT_INSN (prev) = next;
4b4bf941 3922 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea
DB
3923 {
3924 rtx sequence = PATTERN (prev);
3925 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3926 }
3927 }
5936d944
JH
3928 else if (get_insns () == insn)
3929 {
fb9ef4c1
JH
3930 if (next)
3931 PREV_INSN (next) = NULL;
5936d944
JH
3932 set_first_insn (next);
3933 }
89e99eea
DB
3934 else
3935 {
49ad7cfa 3936 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3937 /* Scan all pending sequences too. */
3938 for (; stack; stack = stack->next)
3939 if (insn == stack->first)
3940 {
3941 stack->first = next;
3942 break;
3943 }
3944
5b0264cb 3945 gcc_assert (stack);
89e99eea
DB
3946 }
3947
3948 if (next)
3949 {
3950 PREV_INSN (next) = prev;
4b4bf941 3951 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
89e99eea
DB
3952 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3953 }
5936d944
JH
3954 else if (get_last_insn () == insn)
3955 set_last_insn (prev);
89e99eea
DB
3956 else
3957 {
49ad7cfa 3958 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3959 /* Scan all pending sequences too. */
3960 for (; stack; stack = stack->next)
3961 if (insn == stack->last)
3962 {
3963 stack->last = prev;
3964 break;
3965 }
3966
5b0264cb 3967 gcc_assert (stack);
89e99eea 3968 }
4b4bf941 3969 if (!BARRIER_P (insn)
53c17031
JH
3970 && (bb = BLOCK_FOR_INSN (insn)))
3971 {
4e0084e4 3972 if (NONDEBUG_INSN_P (insn))
6fb5fa3c 3973 df_set_bb_dirty (bb);
a813c111 3974 if (BB_HEAD (bb) == insn)
53c17031 3975 {
3bf1e984
RK
 3976 /* Never ever delete the basic block note without deleting the whole
3977 basic block. */
5b0264cb 3978 gcc_assert (!NOTE_P (insn));
a813c111 3979 BB_HEAD (bb) = next;
53c17031 3980 }
a813c111
SB
3981 if (BB_END (bb) == insn)
3982 BB_END (bb) = prev;
53c17031 3983 }
89e99eea
DB
3984}
3985
ee960939
OH
3986/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3987
3988void
502b8322 3989add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3990{
5b0264cb 3991 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3992
3993 /* Put the register usage information on the CALL. If there is already
3994 some usage information, put ours at the end. */
3995 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3996 {
3997 rtx link;
3998
3999 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4000 link = XEXP (link, 1))
4001 ;
4002
4003 XEXP (link, 1) = call_fusage;
4004 }
4005 else
4006 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
 4007}

/* Usage sketch (assumed, not from the original source): record that a
   call implicitly uses hard register REG:

     rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                     gen_rtx_USE (VOIDmode, reg),
                                     NULL_RTX);
     add_function_usage_to (call_insn, fusage);

   REG and CALL_INSN are placeholders supplied by the caller.  */
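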
4008
23b2ce53
RS
4009/* Delete all insns made since FROM.
4010 FROM becomes the new last instruction. */
4011
4012void
502b8322 4013delete_insns_since (rtx from)
23b2ce53
RS
4014{
4015 if (from == 0)
5936d944 4016 set_first_insn (0);
23b2ce53
RS
4017 else
4018 NEXT_INSN (from) = 0;
5936d944 4019 set_last_insn (from);
23b2ce53
RS
4020}
4021
5dab5552
MS
4022/* This function is deprecated, please use sequences instead.
4023
4024 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4025 The insns to be moved are those between FROM and TO.
4026 They are moved to a new position after the insn AFTER.
4027 AFTER must not be FROM or TO or any insn in between.
4028
4029 This function does not know about SEQUENCEs and hence should not be
4030 called after delay-slot filling has been done. */
4031
4032void
502b8322 4033reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53 4034{
4f8344eb
HPN
4035#ifdef ENABLE_CHECKING
4036 rtx x;
4037 for (x = from; x != to; x = NEXT_INSN (x))
4038 gcc_assert (after != x);
4039 gcc_assert (after != to);
4040#endif
4041
23b2ce53
RS
4042 /* Splice this bunch out of where it is now. */
4043 if (PREV_INSN (from))
4044 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4045 if (NEXT_INSN (to))
4046 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4047 if (get_last_insn () == to)
4048 set_last_insn (PREV_INSN (from));
4049 if (get_insns () == from)
4050 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4051
4052 /* Make the new neighbors point to it and it to them. */
4053 if (NEXT_INSN (after))
4054 PREV_INSN (NEXT_INSN (after)) = to;
4055
4056 NEXT_INSN (to) = NEXT_INSN (after);
4057 PREV_INSN (from) = after;
4058 NEXT_INSN (after) = from;
5936d944
JH
4059 if (after == get_last_insn())
4060 set_last_insn (to);
23b2ce53
RS
4061}
4062
3c030e88
JH
4063/* Same as function above, but take care to update BB boundaries. */
4064void
502b8322 4065reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
4066{
4067 rtx prev = PREV_INSN (from);
4068 basic_block bb, bb2;
4069
4070 reorder_insns_nobb (from, to, after);
4071
4b4bf941 4072 if (!BARRIER_P (after)
3c030e88
JH
4073 && (bb = BLOCK_FOR_INSN (after)))
4074 {
4075 rtx x;
6fb5fa3c 4076 df_set_bb_dirty (bb);
68252e27 4077
4b4bf941 4078 if (!BARRIER_P (from)
3c030e88
JH
4079 && (bb2 = BLOCK_FOR_INSN (from)))
4080 {
a813c111
SB
4081 if (BB_END (bb2) == to)
4082 BB_END (bb2) = prev;
6fb5fa3c 4083 df_set_bb_dirty (bb2);
3c030e88
JH
4084 }
4085
a813c111
SB
4086 if (BB_END (bb) == after)
4087 BB_END (bb) = to;
3c030e88
JH
4088
4089 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4090 if (!BARRIER_P (x))
63642d5a 4091 df_insn_change_bb (x, bb);
3c030e88
JH
4092 }
4093}
4094
23b2ce53 4095\f
2f937369
DM
4096/* Emit insn(s) of given code and pattern
4097 at a specified place within the doubly-linked list.
23b2ce53 4098
2f937369
DM
4099 All of the emit_foo global entry points accept an object
4100 X which is either an insn list or a PATTERN of a single
4101 instruction.
23b2ce53 4102
2f937369
DM
4103 There are thus a few canonical ways to generate code and
4104 emit it at a specific place in the instruction stream. For
4105 example, consider the instruction named SPOT and the fact that
4106 we would like to emit some instructions before SPOT. We might
4107 do it like this:
23b2ce53 4108
2f937369
DM
4109 start_sequence ();
4110 ... emit the new instructions ...
4111 insns_head = get_insns ();
4112 end_sequence ();
23b2ce53 4113
2f937369 4114 emit_insn_before (insns_head, SPOT);
23b2ce53 4115
2f937369
DM
4116 It used to be common to generate SEQUENCE rtl instead, but that
4117 is a relic of the past which no longer occurs. The reason is that
4118 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4119 generated would almost certainly die right after it was created. */
23b2ce53 4120
5f02387d
NF
4121static rtx
4122emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4123 rtx (*make_raw) (rtx))
23b2ce53 4124{
b3694847 4125 rtx insn;
23b2ce53 4126
5b0264cb 4127 gcc_assert (before);
2f937369
DM
4128
4129 if (x == NULL_RTX)
4130 return last;
4131
4132 switch (GET_CODE (x))
23b2ce53 4133 {
b5b8b0ac 4134 case DEBUG_INSN:
2f937369
DM
4135 case INSN:
4136 case JUMP_INSN:
4137 case CALL_INSN:
4138 case CODE_LABEL:
4139 case BARRIER:
4140 case NOTE:
4141 insn = x;
4142 while (insn)
4143 {
4144 rtx next = NEXT_INSN (insn);
6fb5fa3c 4145 add_insn_before (insn, before, bb);
2f937369
DM
4146 last = insn;
4147 insn = next;
4148 }
4149 break;
4150
4151#ifdef ENABLE_RTL_CHECKING
4152 case SEQUENCE:
5b0264cb 4153 gcc_unreachable ();
2f937369
DM
4154 break;
4155#endif
4156
4157 default:
5f02387d 4158 last = (*make_raw) (x);
6fb5fa3c 4159 add_insn_before (last, before, bb);
2f937369 4160 break;
23b2ce53
RS
4161 }
4162
2f937369 4163 return last;
23b2ce53
RS
4164}
4165
5f02387d
NF
4166/* Make X be output before the instruction BEFORE. */
4167
4168rtx
4169emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4170{
4171 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4172}
4173
2f937369 4174/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4175 and output it before the instruction BEFORE. */
4176
4177rtx
a7102479 4178emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 4179{
5f02387d
NF
4180 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4181 make_jump_insn_raw);
23b2ce53
RS
4182}
4183
2f937369 4184/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4185 and output it before the instruction BEFORE. */
4186
4187rtx
a7102479 4188emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4189{
5f02387d
NF
4190 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4191 make_call_insn_raw);
969d70ca
JH
4192}
4193
b5b8b0ac
AO
4194/* Make an instruction with body X and code DEBUG_INSN
4195 and output it before the instruction BEFORE. */
4196
4197rtx
4198emit_debug_insn_before_noloc (rtx x, rtx before)
4199{
5f02387d
NF
4200 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4201 make_debug_insn_raw);
b5b8b0ac
AO
4202}
4203
23b2ce53 4204/* Make an insn of code BARRIER
e881bb1b 4205 and output it before the insn BEFORE. */
23b2ce53
RS
4206
4207rtx
502b8322 4208emit_barrier_before (rtx before)
23b2ce53 4209{
b3694847 4210 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4211
4212 INSN_UID (insn) = cur_insn_uid++;
4213
6fb5fa3c 4214 add_insn_before (insn, before, NULL);
23b2ce53
RS
4215 return insn;
4216}
4217
e881bb1b
RH
4218/* Emit the label LABEL before the insn BEFORE. */
4219
4220rtx
502b8322 4221emit_label_before (rtx label, rtx before)
e881bb1b
RH
4222{
4223 /* This can be called twice for the same label as a result of the
4224 confusion that follows a syntax error! So make it harmless. */
4225 if (INSN_UID (label) == 0)
4226 {
4227 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4228 add_insn_before (label, before, NULL);
e881bb1b
RH
4229 }
4230
4231 return label;
4232}
4233
23b2ce53
RS
4234/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4235
4236rtx
a38e7aa5 4237emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4238{
b3694847 4239 rtx note = rtx_alloc (NOTE);
23b2ce53 4240 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4241 NOTE_KIND (note) = subtype;
ba4f7968 4242 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4243 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4244
6fb5fa3c 4245 add_insn_before (note, before, NULL);
23b2ce53
RS
4246 return note;
4247}
4248\f
2f937369
DM
4249/* Helper for emit_insn_after, handles lists of instructions
4250 efficiently. */
23b2ce53 4251
2f937369 4252static rtx
6fb5fa3c 4253emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4254{
2f937369
DM
4255 rtx last;
4256 rtx after_after;
6fb5fa3c
DB
4257 if (!bb && !BARRIER_P (after))
4258 bb = BLOCK_FOR_INSN (after);
23b2ce53 4259
6fb5fa3c 4260 if (bb)
23b2ce53 4261 {
6fb5fa3c 4262 df_set_bb_dirty (bb);
2f937369 4263 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4264 if (!BARRIER_P (last))
6fb5fa3c
DB
4265 {
4266 set_block_for_insn (last, bb);
4267 df_insn_rescan (last);
4268 }
4b4bf941 4269 if (!BARRIER_P (last))
6fb5fa3c
DB
4270 {
4271 set_block_for_insn (last, bb);
4272 df_insn_rescan (last);
4273 }
a813c111
SB
4274 if (BB_END (bb) == after)
4275 BB_END (bb) = last;
23b2ce53
RS
4276 }
4277 else
2f937369
DM
4278 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4279 continue;
4280
4281 after_after = NEXT_INSN (after);
4282
4283 NEXT_INSN (after) = first;
4284 PREV_INSN (first) = after;
4285 NEXT_INSN (last) = after_after;
4286 if (after_after)
4287 PREV_INSN (after_after) = last;
4288
5936d944
JH
4289 if (after == get_last_insn())
4290 set_last_insn (last);
e855c69d 4291
2f937369
DM
4292 return last;
4293}
4294
5f02387d
NF
4295static rtx
4296emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4297 rtx (*make_raw)(rtx))
2f937369
DM
4298{
4299 rtx last = after;
4300
5b0264cb 4301 gcc_assert (after);
2f937369
DM
4302
4303 if (x == NULL_RTX)
4304 return last;
4305
4306 switch (GET_CODE (x))
23b2ce53 4307 {
b5b8b0ac 4308 case DEBUG_INSN:
2f937369
DM
4309 case INSN:
4310 case JUMP_INSN:
4311 case CALL_INSN:
4312 case CODE_LABEL:
4313 case BARRIER:
4314 case NOTE:
6fb5fa3c 4315 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4316 break;
4317
4318#ifdef ENABLE_RTL_CHECKING
4319 case SEQUENCE:
5b0264cb 4320 gcc_unreachable ();
2f937369
DM
4321 break;
4322#endif
4323
4324 default:
5f02387d 4325 last = (*make_raw) (x);
6fb5fa3c 4326 add_insn_after (last, after, bb);
2f937369 4327 break;
23b2ce53
RS
4328 }
4329
2f937369 4330 return last;
23b2ce53
RS
4331}
4332
5f02387d
NF
4333/* Make X be output after the insn AFTER and set the BB of insn. If
4334 BB is NULL, an attempt is made to infer the BB from AFTER. */
4335
4336rtx
4337emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4338{
4339 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4340}
4341
255680cf 4342
2f937369 4343/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4344 and output it after the insn AFTER. */
4345
4346rtx
a7102479 4347emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4348{
5f02387d 4349 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
2f937369
DM
4350}
4351
4352/* Make an instruction with body X and code CALL_INSN
4353 and output it after the instruction AFTER. */
4354
4355rtx
a7102479 4356emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4357{
5f02387d 4358 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4359}
4360
b5b8b0ac
AO
 4361/* Make an instruction with body X and code DEBUG_INSN
4362 and output it after the instruction AFTER. */
4363
4364rtx
4365emit_debug_insn_after_noloc (rtx x, rtx after)
4366{
5f02387d 4367 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4368}
4369
23b2ce53
RS
4370/* Make an insn of code BARRIER
4371 and output it after the insn AFTER. */
4372
4373rtx
502b8322 4374emit_barrier_after (rtx after)
23b2ce53 4375{
b3694847 4376 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4377
4378 INSN_UID (insn) = cur_insn_uid++;
4379
6fb5fa3c 4380 add_insn_after (insn, after, NULL);
23b2ce53
RS
4381 return insn;
4382}
4383
4384/* Emit the label LABEL after the insn AFTER. */
4385
4386rtx
502b8322 4387emit_label_after (rtx label, rtx after)
23b2ce53
RS
4388{
4389 /* This can be called twice for the same label
4390 as a result of the confusion that follows a syntax error!
4391 So make it harmless. */
4392 if (INSN_UID (label) == 0)
4393 {
4394 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4395 add_insn_after (label, after, NULL);
23b2ce53
RS
4396 }
4397
4398 return label;
4399}
4400
4401/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4402
4403rtx
a38e7aa5 4404emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4405{
b3694847 4406 rtx note = rtx_alloc (NOTE);
23b2ce53 4407 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4408 NOTE_KIND (note) = subtype;
ba4f7968 4409 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4410 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4411 add_insn_after (note, after, NULL);
23b2ce53
RS
4412 return note;
4413}
23b2ce53 4414\f
e8110d6f
NF
 4415/* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4416 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4417
4418static rtx
4419emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4420 rtx (*make_raw) (rtx))
0d682900 4421{
e8110d6f 4422 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4423
a7102479 4424 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4425 return last;
4426
2f937369
DM
4427 after = NEXT_INSN (after);
4428 while (1)
4429 {
a7102479 4430 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4431 INSN_LOCATOR (after) = loc;
2f937369
DM
4432 if (after == last)
4433 break;
4434 after = NEXT_INSN (after);
4435 }
0d682900
JH
4436 return last;
4437}
4438
e8110d6f
NF
4439/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4440 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4441 any DEBUG_INSNs. */
4442
4443static rtx
4444emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4445 rtx (*make_raw) (rtx))
a7102479 4446{
b5b8b0ac
AO
4447 rtx prev = after;
4448
e8110d6f
NF
4449 if (skip_debug_insns)
4450 while (DEBUG_INSN_P (prev))
4451 prev = PREV_INSN (prev);
b5b8b0ac
AO
4452
4453 if (INSN_P (prev))
e8110d6f
NF
4454 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4455 make_raw);
a7102479 4456 else
e8110d6f 4457 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4458}
4459
e8110d6f 4460/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
0d682900 4461rtx
e8110d6f 4462emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4463{
e8110d6f
NF
4464 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4465}
2f937369 4466
e8110d6f
NF
4467/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4468rtx
4469emit_insn_after (rtx pattern, rtx after)
4470{
4471 return emit_pattern_after (pattern, after, true, make_insn_raw);
4472}
dd3adcf8 4473
e8110d6f
NF
4474/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4475rtx
4476emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4477{
4478 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4479}
4480
a7102479
JH
4481/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4482rtx
4483emit_jump_insn_after (rtx pattern, rtx after)
4484{
e8110d6f 4485 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4486}
4487
e8110d6f 4488/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
0d682900 4489rtx
502b8322 4490emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4491{
e8110d6f 4492 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4493}
4494
a7102479
JH
4495/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4496rtx
4497emit_call_insn_after (rtx pattern, rtx after)
4498{
e8110d6f 4499 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4500}
4501
e8110d6f 4502/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
b5b8b0ac
AO
4503rtx
4504emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4505{
e8110d6f 4506 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4507}
4508
4509/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4510rtx
4511emit_debug_insn_after (rtx pattern, rtx after)
4512{
e8110d6f 4513 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4514}
4515
e8110d6f
NF
 4516/* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4517 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4518 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4519 CALL_INSN, etc. */
4520
4521static rtx
4522emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4523 rtx (*make_raw) (rtx))
0d682900
JH
4524{
4525 rtx first = PREV_INSN (before);
e8110d6f
NF
4526 rtx last = emit_pattern_before_noloc (pattern, before,
4527 insnp ? before : NULL_RTX,
4528 NULL, make_raw);
a7102479
JH
4529
4530 if (pattern == NULL_RTX || !loc)
4531 return last;
4532
26cb3993
JH
4533 if (!first)
4534 first = get_insns ();
4535 else
4536 first = NEXT_INSN (first);
a7102479
JH
4537 while (1)
4538 {
4539 if (active_insn_p (first) && !INSN_LOCATOR (first))
4540 INSN_LOCATOR (first) = loc;
4541 if (first == last)
4542 break;
4543 first = NEXT_INSN (first);
4544 }
4545 return last;
4546}
4547
e8110d6f
NF
4548/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4549 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4550 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4551 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4552
4553static rtx
4554emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4555 bool insnp, rtx (*make_raw) (rtx))
a7102479 4556{
b5b8b0ac
AO
4557 rtx next = before;
4558
e8110d6f
NF
4559 if (skip_debug_insns)
4560 while (DEBUG_INSN_P (next))
4561 next = PREV_INSN (next);
b5b8b0ac
AO
4562
4563 if (INSN_P (next))
e8110d6f
NF
4564 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4565 insnp, make_raw);
a7102479 4566 else
e8110d6f
NF
4567 return emit_pattern_before_noloc (pattern, before,
4568 insnp ? before : NULL_RTX,
4569 NULL, make_raw);
a7102479
JH
4570}
4571
e8110d6f 4572/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
a7102479 4573rtx
e8110d6f 4574emit_insn_before_setloc (rtx pattern, rtx before, int loc)
a7102479 4575{
e8110d6f
NF
4576 return emit_pattern_before_setloc (pattern, before, loc, true,
4577 make_insn_raw);
4578}
a7102479 4579
e8110d6f
NF
4580/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4581rtx
4582emit_insn_before (rtx pattern, rtx before)
4583{
4584 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4585}
a7102479 4586
e8110d6f
NF
 4587/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4588rtx
4589emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4590{
4591 return emit_pattern_before_setloc (pattern, before, loc, false,
4592 make_jump_insn_raw);
a7102479
JH
4593}
4594
4595/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4596rtx
4597emit_jump_insn_before (rtx pattern, rtx before)
4598{
e8110d6f
NF
4599 return emit_pattern_before (pattern, before, true, false,
4600 make_jump_insn_raw);
a7102479
JH
4601}
4602
e8110d6f 4603/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
a7102479
JH
4604rtx
4605emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4606{
e8110d6f
NF
4607 return emit_pattern_before_setloc (pattern, before, loc, false,
4608 make_call_insn_raw);
0d682900 4609}
a7102479 4610
e8110d6f
NF
4611/* Like emit_call_insn_before_noloc,
 4612 but set INSN_LOCATOR according to BEFORE. */
a7102479
JH
4613rtx
4614emit_call_insn_before (rtx pattern, rtx before)
4615{
e8110d6f
NF
4616 return emit_pattern_before (pattern, before, true, false,
4617 make_call_insn_raw);
a7102479 4618}
b5b8b0ac 4619
e8110d6f 4620/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
b5b8b0ac
AO
4621rtx
4622emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4623{
e8110d6f
NF
4624 return emit_pattern_before_setloc (pattern, before, loc, false,
4625 make_debug_insn_raw);
b5b8b0ac
AO
4626}
4627
e8110d6f
NF
4628/* Like emit_debug_insn_before_noloc,
 4629 but set INSN_LOCATOR according to BEFORE. */
b5b8b0ac
AO
4630rtx
4631emit_debug_insn_before (rtx pattern, rtx before)
4632{
e8110d6f
NF
4633 return emit_pattern_before (pattern, before, false, false,
4634 make_debug_insn_raw);
b5b8b0ac 4635}
0d682900 4636\f
2f937369
DM
4637/* Take X and emit it at the end of the doubly-linked
4638 INSN list.
23b2ce53
RS
4639
4640 Returns the last insn emitted. */
4641
4642rtx
502b8322 4643emit_insn (rtx x)
23b2ce53 4644{
5936d944 4645 rtx last = get_last_insn();
2f937369 4646 rtx insn;
23b2ce53 4647
2f937369
DM
4648 if (x == NULL_RTX)
4649 return last;
23b2ce53 4650
2f937369
DM
4651 switch (GET_CODE (x))
4652 {
b5b8b0ac 4653 case DEBUG_INSN:
2f937369
DM
4654 case INSN:
4655 case JUMP_INSN:
4656 case CALL_INSN:
4657 case CODE_LABEL:
4658 case BARRIER:
4659 case NOTE:
4660 insn = x;
4661 while (insn)
23b2ce53 4662 {
2f937369 4663 rtx next = NEXT_INSN (insn);
23b2ce53 4664 add_insn (insn);
2f937369
DM
4665 last = insn;
4666 insn = next;
23b2ce53 4667 }
2f937369 4668 break;
23b2ce53 4669
2f937369
DM
4670#ifdef ENABLE_RTL_CHECKING
4671 case SEQUENCE:
5b0264cb 4672 gcc_unreachable ();
2f937369
DM
4673 break;
4674#endif
23b2ce53 4675
2f937369
DM
4676 default:
4677 last = make_insn_raw (x);
4678 add_insn (last);
4679 break;
23b2ce53
RS
4680 }
4681
4682 return last;
4683}
4684
b5b8b0ac
AO
4685/* Make an insn of code DEBUG_INSN with pattern X
4686 and add it to the end of the doubly-linked list. */
4687
4688rtx
4689emit_debug_insn (rtx x)
4690{
5936d944 4691 rtx last = get_last_insn();
b5b8b0ac
AO
4692 rtx insn;
4693
4694 if (x == NULL_RTX)
4695 return last;
4696
4697 switch (GET_CODE (x))
4698 {
4699 case DEBUG_INSN:
4700 case INSN:
4701 case JUMP_INSN:
4702 case CALL_INSN:
4703 case CODE_LABEL:
4704 case BARRIER:
4705 case NOTE:
4706 insn = x;
4707 while (insn)
4708 {
4709 rtx next = NEXT_INSN (insn);
4710 add_insn (insn);
4711 last = insn;
4712 insn = next;
4713 }
4714 break;
4715
4716#ifdef ENABLE_RTL_CHECKING
4717 case SEQUENCE:
4718 gcc_unreachable ();
4719 break;
4720#endif
4721
4722 default:
4723 last = make_debug_insn_raw (x);
4724 add_insn (last);
4725 break;
4726 }
4727
4728 return last;
4729}
4730
2f937369
DM
4731/* Make an insn of code JUMP_INSN with pattern X
4732 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4733
4734rtx
502b8322 4735emit_jump_insn (rtx x)
23b2ce53 4736{
d950dee3 4737 rtx last = NULL_RTX, insn;
23b2ce53 4738
2f937369 4739 switch (GET_CODE (x))
23b2ce53 4740 {
b5b8b0ac 4741 case DEBUG_INSN:
2f937369
DM
4742 case INSN:
4743 case JUMP_INSN:
4744 case CALL_INSN:
4745 case CODE_LABEL:
4746 case BARRIER:
4747 case NOTE:
4748 insn = x;
4749 while (insn)
4750 {
4751 rtx next = NEXT_INSN (insn);
4752 add_insn (insn);
4753 last = insn;
4754 insn = next;
4755 }
4756 break;
e0a5c5eb 4757
2f937369
DM
4758#ifdef ENABLE_RTL_CHECKING
4759 case SEQUENCE:
5b0264cb 4760 gcc_unreachable ();
2f937369
DM
4761 break;
4762#endif
e0a5c5eb 4763
2f937369
DM
4764 default:
4765 last = make_jump_insn_raw (x);
4766 add_insn (last);
4767 break;
3c030e88 4768 }
e0a5c5eb
RS
4769
4770 return last;
4771}
4772
2f937369 4773/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4774 and add it to the end of the doubly-linked list. */
4775
4776rtx
502b8322 4777emit_call_insn (rtx x)
23b2ce53 4778{
2f937369
DM
4779 rtx insn;
4780
4781 switch (GET_CODE (x))
23b2ce53 4782 {
b5b8b0ac 4783 case DEBUG_INSN:
2f937369
DM
4784 case INSN:
4785 case JUMP_INSN:
4786 case CALL_INSN:
4787 case CODE_LABEL:
4788 case BARRIER:
4789 case NOTE:
4790 insn = emit_insn (x);
4791 break;
23b2ce53 4792
2f937369
DM
4793#ifdef ENABLE_RTL_CHECKING
4794 case SEQUENCE:
5b0264cb 4795 gcc_unreachable ();
2f937369
DM
4796 break;
4797#endif
23b2ce53 4798
2f937369
DM
4799 default:
4800 insn = make_call_insn_raw (x);
23b2ce53 4801 add_insn (insn);
2f937369 4802 break;
23b2ce53 4803 }
2f937369
DM
4804
4805 return insn;
23b2ce53
RS
4806}
4807
4808/* Add the label LABEL to the end of the doubly-linked list. */
4809
4810rtx
502b8322 4811emit_label (rtx label)
23b2ce53
RS
4812{
4813 /* This can be called twice for the same label
4814 as a result of the confusion that follows a syntax error!
4815 So make it harmless. */
4816 if (INSN_UID (label) == 0)
4817 {
4818 INSN_UID (label) = cur_insn_uid++;
4819 add_insn (label);
4820 }
4821 return label;
4822}
4823
4824/* Make an insn of code BARRIER
4825 and add it to the end of the doubly-linked list. */
4826
4827rtx
502b8322 4828emit_barrier (void)
23b2ce53 4829{
b3694847 4830 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4831 INSN_UID (barrier) = cur_insn_uid++;
4832 add_insn (barrier);
4833 return barrier;
4834}
4835
5f2fc772 4836/* Emit a copy of note ORIG. */
502b8322 4837
5f2fc772
NS
4838rtx
4839emit_note_copy (rtx orig)
4840{
4841 rtx note;
b8698a0f 4842
5f2fc772 4843 note = rtx_alloc (NOTE);
b8698a0f 4844
5f2fc772
NS
4845 INSN_UID (note) = cur_insn_uid++;
4846 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4847 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4848 BLOCK_FOR_INSN (note) = NULL;
4849 add_insn (note);
b8698a0f 4850
2e040219 4851 return note;
23b2ce53
RS
4852}
4853
2e040219
NS
4854/* Make an insn of code NOTE or type NOTE_NO
4855 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4856
4857rtx
a38e7aa5 4858emit_note (enum insn_note kind)
23b2ce53 4859{
b3694847 4860 rtx note;
23b2ce53 4861
23b2ce53
RS
4862 note = rtx_alloc (NOTE);
4863 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4864 NOTE_KIND (note) = kind;
dd107e66 4865 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4866 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4867 add_insn (note);
4868 return note;
4869}
4870
c41c1387
RS
4871/* Emit a clobber of lvalue X. */
4872
4873rtx
4874emit_clobber (rtx x)
4875{
4876 /* CONCATs should not appear in the insn stream. */
4877 if (GET_CODE (x) == CONCAT)
4878 {
4879 emit_clobber (XEXP (x, 0));
4880 return emit_clobber (XEXP (x, 1));
4881 }
4882 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4883}
4884
4885/* Return a sequence of insns to clobber lvalue X. */
4886
4887rtx
4888gen_clobber (rtx x)
4889{
4890 rtx seq;
4891
4892 start_sequence ();
4893 emit_clobber (x);
4894 seq = get_insns ();
4895 end_sequence ();
4896 return seq;
4897}
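
/* Sketch: gen_clobber is convenient when a detached (clobber X)
   sequence is needed before a specific insn (X and INSN here are
   placeholders chosen by the caller):

     emit_insn_before (gen_clobber (x), insn);
*/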
4898
4899/* Emit a use of rvalue X. */
4900
4901rtx
4902emit_use (rtx x)
4903{
4904 /* CONCATs should not appear in the insn stream. */
4905 if (GET_CODE (x) == CONCAT)
4906 {
4907 emit_use (XEXP (x, 0));
4908 return emit_use (XEXP (x, 1));
4909 }
4910 return emit_insn (gen_rtx_USE (VOIDmode, x));
4911}
4912
4913/* Return a sequence of insns to use rvalue X. */
4914
4915rtx
4916gen_use (rtx x)
4917{
4918 rtx seq;
4919
4920 start_sequence ();
4921 emit_use (x);
4922 seq = get_insns ();
4923 end_sequence ();
4924 return seq;
4925}
4926
23b2ce53 4927/* Cause next statement to emit a line note even if the line number
0cea056b 4928 has not changed. */
23b2ce53
RS
4929
4930void
502b8322 4931force_next_line_note (void)
23b2ce53 4932{
6773e15f 4933 last_location = -1;
23b2ce53 4934}
87b47c85
AM
4935
4936/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4937 note of this type already exists, remove it first. */
87b47c85 4938
3d238248 4939rtx
502b8322 4940set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4941{
4942 rtx note = find_reg_note (insn, kind, NULL_RTX);
4943
52488da1
JW
4944 switch (kind)
4945 {
4946 case REG_EQUAL:
4947 case REG_EQUIV:
4948 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4949 has multiple sets (some callers assume single_set
4950 means the insn only has one set, when in fact it
4951 means the insn only has one * useful * set). */
4952 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4953 {
5b0264cb 4954 gcc_assert (!note);
52488da1
JW
4955 return NULL_RTX;
4956 }
4957
4958 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4959 It serves no useful purpose and breaks eliminate_regs. */
4960 if (GET_CODE (datum) == ASM_OPERANDS)
4961 return NULL_RTX;
6fb5fa3c
DB
4962
4963 if (note)
4964 {
4965 XEXP (note, 0) = datum;
4966 df_notes_rescan (insn);
4967 return note;
4968 }
52488da1
JW
4969 break;
4970
4971 default:
6fb5fa3c
DB
4972 if (note)
4973 {
4974 XEXP (note, 0) = datum;
4975 return note;
4976 }
52488da1
JW
4977 break;
4978 }
3d238248 4979
65c5f2a6 4980 add_reg_note (insn, kind, datum);
6fb5fa3c
DB
4981
4982 switch (kind)
3d238248 4983 {
6fb5fa3c
DB
4984 case REG_EQUAL:
4985 case REG_EQUIV:
4986 df_notes_rescan (insn);
4987 break;
4988 default:
4989 break;
3d238248 4990 }
87b47c85 4991
3d238248 4992 return REG_NOTES (insn);
87b47c85 4993}
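
/* Typical call (sketch): note that INSN's single set is known to
   equal a constant, replacing any previous REG_EQUAL note:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   The constant 42 is purely illustrative.  */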
23b2ce53
RS
4994\f
4995/* Return an indication of which type of insn should have X as a body.
4996 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4997
d78db459 4998static enum rtx_code
502b8322 4999classify_insn (rtx x)
23b2ce53 5000{
4b4bf941 5001 if (LABEL_P (x))
23b2ce53
RS
5002 return CODE_LABEL;
5003 if (GET_CODE (x) == CALL)
5004 return CALL_INSN;
5005 if (GET_CODE (x) == RETURN)
5006 return JUMP_INSN;
5007 if (GET_CODE (x) == SET)
5008 {
5009 if (SET_DEST (x) == pc_rtx)
5010 return JUMP_INSN;
5011 else if (GET_CODE (SET_SRC (x)) == CALL)
5012 return CALL_INSN;
5013 else
5014 return INSN;
5015 }
5016 if (GET_CODE (x) == PARALLEL)
5017 {
b3694847 5018 int j;
23b2ce53
RS
5019 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5020 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5021 return CALL_INSN;
5022 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5023 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5024 return JUMP_INSN;
5025 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5026 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5027 return CALL_INSN;
5028 }
5029 return INSN;
5030}
5031
5032/* Emit the rtl pattern X as an appropriate kind of insn.
5033 If X is a label, it is simply added into the insn chain. */
5034
5035rtx
502b8322 5036emit (rtx x)
23b2ce53
RS
5037{
5038 enum rtx_code code = classify_insn (x);
5039
5b0264cb 5040 switch (code)
23b2ce53 5041 {
5b0264cb
NS
5042 case CODE_LABEL:
5043 return emit_label (x);
5044 case INSN:
5045 return emit_insn (x);
5046 case JUMP_INSN:
5047 {
5048 rtx insn = emit_jump_insn (x);
5049 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5050 return emit_barrier ();
5051 return insn;
5052 }
5053 case CALL_INSN:
5054 return emit_call_insn (x);
b5b8b0ac
AO
5055 case DEBUG_INSN:
5056 return emit_debug_insn (x);
5b0264cb
NS
5057 default:
5058 gcc_unreachable ();
23b2ce53 5059 }
23b2ce53
RS
5060}
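
/* For example (sketch; DEST and SRC are placeholders), emitting

     emit (gen_rtx_SET (VOIDmode, dest, src));

   classifies the pattern as an INSN, unless DEST is pc_rtx, in which
   case a JUMP_INSN is made and, for an unconditional jump, a barrier
   is emitted after it.  */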
5061\f
e2500fed 5062/* Space for free sequence stack entries. */
1431042e 5063static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5064
4dfa0342
RH
5065/* Begin emitting insns to a sequence. If this sequence will contain
5066 something that might cause the compiler to pop arguments to function
5067 calls (because those pops have previously been deferred; see
5068 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5069 before calling this function. That will ensure that the deferred
5070 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5071
5072void
502b8322 5073start_sequence (void)
23b2ce53
RS
5074{
5075 struct sequence_stack *tem;
5076
e2500fed
GK
5077 if (free_sequence_stack != NULL)
5078 {
5079 tem = free_sequence_stack;
5080 free_sequence_stack = tem->next;
5081 }
5082 else
a9429e29 5083 tem = ggc_alloc_sequence_stack ();
23b2ce53 5084
49ad7cfa 5085 tem->next = seq_stack;
5936d944
JH
5086 tem->first = get_insns ();
5087 tem->last = get_last_insn ();
23b2ce53 5088
49ad7cfa 5089 seq_stack = tem;
23b2ce53 5090
5936d944
JH
5091 set_first_insn (0);
5092 set_last_insn (0);
23b2ce53
RS
5093}
5094
5c7a310f
MM
5095/* Set up the insn chain starting with FIRST as the current sequence,
5096 saving the previously current one. See the documentation for
5097 start_sequence for more information about how to use this function. */
23b2ce53
RS
5098
5099void
502b8322 5100push_to_sequence (rtx first)
23b2ce53
RS
5101{
5102 rtx last;
5103
5104 start_sequence ();
5105
e84a58ff
EB
5106 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5107 ;
23b2ce53 5108
5936d944
JH
5109 set_first_insn (first);
5110 set_last_insn (last);
23b2ce53
RS
5111}
5112
bb27eeda
SE
5113/* Like push_to_sequence, but take the last insn as an argument to avoid
5114 looping through the list. */
5115
5116void
5117push_to_sequence2 (rtx first, rtx last)
5118{
5119 start_sequence ();
5120
5936d944
JH
5121 set_first_insn (first);
5122 set_last_insn (last);
bb27eeda
SE
5123}
5124
f15ae3a1
TW
5125/* Set up the outer-level insn chain
5126 as the current sequence, saving the previously current one. */
5127
5128void
502b8322 5129push_topmost_sequence (void)
f15ae3a1 5130{
aefdd5ab 5131 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5132
5133 start_sequence ();
5134
49ad7cfa 5135 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5136 top = stack;
5137
5936d944
JH
5138 set_first_insn (top->first);
5139 set_last_insn (top->last);
f15ae3a1
TW
5140}
5141
5142/* After emitting to the outer-level insn chain, update the outer-level
5143 insn chain, and restore the previous saved state. */
5144
5145void
502b8322 5146pop_topmost_sequence (void)
f15ae3a1 5147{
aefdd5ab 5148 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5149
49ad7cfa 5150 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5151 top = stack;
5152
5936d944
JH
5153 top->first = get_insns ();
5154 top->last = get_last_insn ();
f15ae3a1
TW
5155
5156 end_sequence ();
5157}
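
/* Sketch of the outer-chain idiom (illustrative placement only):

     push_topmost_sequence ();
     emit_insn_after (seq, get_insns ());
     pop_topmost_sequence ();

   This splices SEQ near the start of the function even when called
   from inside nested sequences; SEQ is a placeholder insn list.  */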
5158
23b2ce53
RS
5159/* After emitting to a sequence, restore previous saved state.
5160
5c7a310f 5161 To get the contents of the sequence just made, you must call
2f937369 5162 `get_insns' *before* calling here.
5c7a310f
MM
5163
5164 If the compiler might have deferred popping arguments while
5165 generating this sequence, and this sequence will not be immediately
5166 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5167 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5168 pops are inserted into this sequence, and not into some random
5169 location in the instruction stream. See INHIBIT_DEFER_POP for more
5170 information about deferred popping of arguments. */
23b2ce53
RS
5171
5172void
502b8322 5173end_sequence (void)
23b2ce53 5174{
49ad7cfa 5175 struct sequence_stack *tem = seq_stack;
23b2ce53 5176
5936d944
JH
5177 set_first_insn (tem->first);
5178 set_last_insn (tem->last);
49ad7cfa 5179 seq_stack = tem->next;
23b2ce53 5180
e2500fed
GK
5181 memset (tem, 0, sizeof (*tem));
5182 tem->next = free_sequence_stack;
5183 free_sequence_stack = tem;
23b2ce53
RS
5184}
5185
5186/* Return 1 if currently emitting into a sequence. */
5187
5188int
502b8322 5189in_sequence_p (void)
23b2ce53 5190{
49ad7cfa 5191 return seq_stack != 0;
23b2ce53 5192}
23b2ce53 5193\f
59ec66dc
MM
5194/* Put the various virtual registers into REGNO_REG_RTX. */
5195
2bbdec73 5196static void
bd60bab2 5197init_virtual_regs (void)
59ec66dc 5198{
bd60bab2
JH
5199 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5200 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5201 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5202 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5203 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5204 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5205 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5206}
5207
\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
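
/* Illustrative sketch (not part of the original source, and assuming
   the target supports V4SImode): an all-zero vector built through this
   entry point collapses to the shared CONST0_RTX instead of a fresh
   CONST_VECTOR:

     rtvec v = rtvec_alloc (4);
     int k;
     for (k = 0; k < 4; k++)
       RTVEC_ELT (v, k) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                 == CONST0_RTX (V4SImode));

   V and K are hypothetical locals; the assert holds because every
   element equals CONST0_RTX (SImode), so the equal-elements scan above
   reaches i == -1 and takes the standard-vector path.  */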

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
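
/* Illustrative sketch (not part of the original source): because the
   CONST_INTs in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are
   created once above and reused ever after, small integer constants
   compare equal by pointer:

     gcc_assert (GEN_INT (0) == const0_rtx);
     gcc_assert (GEN_INT (1) == const1_rtx);

   Values outside the saved range go through const_int_htab instead,
   which likewise hands back one shared rtx per value.  */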
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
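
/* Illustrative sketch (not part of the original source): basic block
   duplication code might clone the last insn of a block in place:

     rtx dup = emit_copy_of_insn_after (BB_END (bb), BB_END (bb));

   BB is a hypothetical basic_block; the returned insn carries the
   original's REG_NOTES, INSN_CODE, locator and frame-related flag, as
   set up above.  */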

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
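
/* Illustrative sketch (not part of the original source, and assuming
   hard register 0 supports SImode on the target): repeated requests
   for the same (mode, regno) pair hit the cache above and return
   pointer-identical rtl:

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     gcc_assert (c1 == c2);

   C1 and C2 are hypothetical locals; GTY((deletable)) lets the garbage
   collector discard cached entries, which are simply rebuilt on the
   next call.  */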

#include "gt-emit-rtl.h"