bccafa26 1/* Emit RTL for the GCC expander.
ddca3e9d 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3e052aec 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
18b7c118 4 2010, 2011
535664e3 5 Free Software Foundation, Inc.
15bbde2b 6
f12b58b3 7This file is part of GCC.
15bbde2b 8
f12b58b3 9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
8c4c00c1 11Software Foundation; either version 3, or (at your option) any later
f12b58b3 12version.
15bbde2b 13
f12b58b3 14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
15bbde2b 18
19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
15bbde2b 22
23
24/* Middle-to-low level generation of rtx code and insns.
25
74efa612 26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
15bbde2b 28
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
74efa612 31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
8fd5918e 34 dependent is the kind of rtx's they make and what arguments they
35 use. */
15bbde2b 36
37#include "config.h"
405711de 38#include "system.h"
805e22b2 39#include "coretypes.h"
40#include "tm.h"
0b205f4c 41#include "diagnostic-core.h"
15bbde2b 42#include "rtl.h"
3fd7e17f 43#include "tree.h"
7953c610 44#include "tm_p.h"
15bbde2b 45#include "flags.h"
46#include "function.h"
47#include "expr.h"
48#include "regs.h"
c6b6c51f 49#include "hard-reg-set.h"
73f5c1e3 50#include "hashtab.h"
15bbde2b 51#include "insn-config.h"
0dbd1c74 52#include "recog.h"
a3426c4c 53#include "bitmap.h"
f3d96a58 54#include "basic-block.h"
a7b0c170 55#include "ggc.h"
b29760a8 56#include "debug.h"
b0278d39 57#include "langhooks.h"
77fce4cd 58#include "tree-pass.h"
3072d30e 59#include "df.h"
9845d120 60#include "params.h"
98155838 61#include "target.h"
73eb0a09 62#include "tree-flow.h"
649d8da6 63
679bcc8d 64struct target_rtl default_target_rtl;
65#if SWITCHABLE_TARGET
66struct target_rtl *this_target_rtl = &default_target_rtl;
67#endif
68
69#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
399d45d3 71/* Commonly used modes. */
72
a92771b8 73enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 75enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 76enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 77
b079a207 78/* Data structures maintained for the currently processed function, in RTL form. */
79
fd6ffb7c 80struct rtl_data x_rtl;
b079a207 81
82/* Indexed by pseudo register number, gives the rtx for that pseudo.
48e1416a 83 Allocated in parallel with regno_pointer_align.
b079a207 84 FIXME: We could put it into the emit_status struct, but gengtype is not
 85 able to deal with a length attribute nested in top-level structures. */
86
87rtx * regno_reg_rtx;
15bbde2b 88
89/* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
91
9105005a 92static GTY(()) int label_num = 1;
15bbde2b 93
15bbde2b 94/* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx. */
97
98rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
99
1a60f06a 100rtx const_true_rtx;
101
15bbde2b 102REAL_VALUE_TYPE dconst0;
103REAL_VALUE_TYPE dconst1;
104REAL_VALUE_TYPE dconst2;
105REAL_VALUE_TYPE dconstm1;
77e89269 106REAL_VALUE_TYPE dconsthalf;
15bbde2b 107
06f0b99c 108/* Record the fixed-point constants 0 and 1. */
109FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
15bbde2b 112/* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
57c097d5 117rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 118
73f5c1e3 119/* A hash table storing CONST_INTs whose absolute value is greater
120 than MAX_SAVED_CONST_INT. */
121
1f3233d1 122static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
123 htab_t const_int_htab;
73f5c1e3 124
c6259b83 125/* A hash table storing memory attribute structures. */
1f3233d1 126static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
127 htab_t mem_attrs_htab;
c6259b83 128
ca74b940 129/* A hash table storing register attribute structures. */
130static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
131 htab_t reg_attrs_htab;
132
2ff23ed0 133/* A hash table storing all CONST_DOUBLEs. */
1f3233d1 134static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
135 htab_t const_double_htab;
2ff23ed0 136
e397ad8e 137/* A hash table storing all CONST_FIXEDs. */
138static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
139 htab_t const_fixed_htab;
140
fd6ffb7c 141#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
9845d120 142#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
fd6ffb7c 143#define last_location (crtl->emit.x_last_location)
144#define first_label_num (crtl->emit.x_first_label_num)
15bbde2b 145
35cb5232 146static rtx make_call_insn_raw (rtx);
35cb5232 147static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
265be050 148static void set_used_decls (tree);
35cb5232 149static void mark_label_nuses (rtx);
150static hashval_t const_int_htab_hash (const void *);
151static int const_int_htab_eq (const void *, const void *);
152static hashval_t const_double_htab_hash (const void *);
153static int const_double_htab_eq (const void *, const void *);
154static rtx lookup_const_double (rtx);
e397ad8e 155static hashval_t const_fixed_htab_hash (const void *);
156static int const_fixed_htab_eq (const void *, const void *);
157static rtx lookup_const_fixed (rtx);
35cb5232 158static hashval_t mem_attrs_htab_hash (const void *);
159static int mem_attrs_htab_eq (const void *, const void *);
35cb5232 160static hashval_t reg_attrs_htab_hash (const void *);
161static int reg_attrs_htab_eq (const void *, const void *);
162static reg_attrs *get_reg_attrs (tree, int);
6e68dcb2 163static rtx gen_const_vector (enum machine_mode, int);
0e0727c4 164static void copy_rtx_if_shared_1 (rtx *orig);
73f5c1e3 165
3cd757b1 166/* Probability of the conditional branch currently being processed by try_split.
167 Set to -1 otherwise. */
168int split_branch_probability = -1;
649d8da6 169\f
73f5c1e3 170/* Returns a hash code for X (which is really a CONST_INT). */
171
172static hashval_t
35cb5232 173const_int_htab_hash (const void *x)
73f5c1e3 174{
dd9b9fc5 175 return (hashval_t) INTVAL ((const_rtx) x);
73f5c1e3 176}
177
6ef828f9 178/* Returns nonzero if the value represented by X (which is really a
73f5c1e3 179 CONST_INT) is the same as that given by Y (which is really a
180 HOST_WIDE_INT *). */
181
182static int
35cb5232 183const_int_htab_eq (const void *x, const void *y)
73f5c1e3 184{
dd9b9fc5 185 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
2ff23ed0 186}
187
188/* Returns a hash code for X (which is really a CONST_DOUBLE). */
189static hashval_t
35cb5232 190const_double_htab_hash (const void *x)
2ff23ed0 191{
dd9b9fc5 192 const_rtx const value = (const_rtx) x;
3393215f 193 hashval_t h;
2ff23ed0 194
3393215f 195 if (GET_MODE (value) == VOIDmode)
196 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
197 else
a5760913 198 {
e2e205b3 199 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
a5760913 200 /* MODE is used in the comparison, so it should be in the hash. */
201 h ^= GET_MODE (value);
202 }
2ff23ed0 203 return h;
204}
205
6ef828f9 206/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
2ff23ed0 207 is the same as that represented by Y (really a CONST_DOUBLE). */
208static int
35cb5232 209const_double_htab_eq (const void *x, const void *y)
2ff23ed0 210{
dd9b9fc5 211 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
2ff23ed0 212
213 if (GET_MODE (a) != GET_MODE (b))
214 return 0;
f82a103d 215 if (GET_MODE (a) == VOIDmode)
216 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
217 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
218 else
219 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
220 CONST_DOUBLE_REAL_VALUE (b));
73f5c1e3 221}
222
e397ad8e 223/* Returns a hash code for X (which is really a CONST_FIXED). */
224
225static hashval_t
226const_fixed_htab_hash (const void *x)
227{
a9f1838b 228 const_rtx const value = (const_rtx) x;
e397ad8e 229 hashval_t h;
230
231 h = fixed_hash (CONST_FIXED_VALUE (value));
232 /* MODE is used in the comparison, so it should be in the hash. */
233 h ^= GET_MODE (value);
234 return h;
235}
236
237/* Returns nonzero if the value represented by X (really a CONST_FIXED)
238 is the same as that represented by Y (really a CONST_FIXED). */
239
240static int
241const_fixed_htab_eq (const void *x, const void *y)
242{
a9f1838b 243 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
e397ad8e 244
245 if (GET_MODE (a) != GET_MODE (b))
246 return 0;
247 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
248}
249
c6259b83 250/* Returns a hash code for X (which is really a mem_attrs *). */
251
252static hashval_t
35cb5232 253mem_attrs_htab_hash (const void *x)
c6259b83 254{
dd9b9fc5 255 const mem_attrs *const p = (const mem_attrs *) x;
c6259b83 256
257 return (p->alias ^ (p->align * 1000)
bd1a81f7 258 ^ (p->addrspace * 4000)
c6259b83 259 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
260 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
2f16183e 261 ^ (size_t) iterative_hash_expr (p->expr, 0));
c6259b83 262}
263
d72886b5 264/* Return true if the given memory attributes are equal. */
73f5c1e3 265
d72886b5 266static bool
267mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
73f5c1e3 268{
2f16183e 269 return (p->alias == q->alias && p->offset == q->offset
270 && p->size == q->size && p->align == q->align
bd1a81f7 271 && p->addrspace == q->addrspace
2f16183e 272 && (p->expr == q->expr
273 || (p->expr != NULL_TREE && q->expr != NULL_TREE
274 && operand_equal_p (p->expr, q->expr, 0))));
73f5c1e3 275}
276
d72886b5 277/* Returns nonzero if the value represented by X (which is really a
278 mem_attrs *) is the same as that given by Y (which is also really a
279 mem_attrs *). */
c6259b83 280
d72886b5 281static int
282mem_attrs_htab_eq (const void *x, const void *y)
c6259b83 283{
d72886b5 284 return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
285}
c6259b83 286
d72886b5 287/* Set MEM's memory attributes so that they are the same as ATTRS. */
5cc193e7 288
d72886b5 289static void
290set_mem_attrs (rtx mem, mem_attrs *attrs)
291{
292 void **slot;
293
294 /* If everything is the default, we can just clear the attributes. */
295 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
296 {
297 MEM_ATTRS (mem) = 0;
298 return;
299 }
c6259b83 300
d72886b5 301 slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
c6259b83 302 if (*slot == 0)
303 {
ba72912a 304 *slot = ggc_alloc_mem_attrs ();
d72886b5 305 memcpy (*slot, attrs, sizeof (mem_attrs));
c6259b83 306 }
307
d72886b5 308 MEM_ATTRS (mem) = (mem_attrs *) *slot;
73f5c1e3 309}
310
ca74b940 311/* Returns a hash code for X (which is really a reg_attrs *). */
312
313static hashval_t
35cb5232 314reg_attrs_htab_hash (const void *x)
ca74b940 315{
aae87fc3 316 const reg_attrs *const p = (const reg_attrs *) x;
ca74b940 317
e19e0a33 318 return ((p->offset * 1000) ^ (intptr_t) p->decl);
ca74b940 319}
320
7ef5b942 321/* Returns nonzero if the value represented by X (which is really a
ca74b940 322 reg_attrs *) is the same as that given by Y (which is also really a
323 reg_attrs *). */
324
325static int
35cb5232 326reg_attrs_htab_eq (const void *x, const void *y)
ca74b940 327{
aae87fc3 328 const reg_attrs *const p = (const reg_attrs *) x;
329 const reg_attrs *const q = (const reg_attrs *) y;
ca74b940 330
331 return (p->decl == q->decl && p->offset == q->offset);
332}
333/* Allocate a new reg_attrs structure and insert it into the hash table if
334 one identical to it is not already in the table. We are doing this for
335 MEM of mode MODE. */
336
337static reg_attrs *
35cb5232 338get_reg_attrs (tree decl, int offset)
ca74b940 339{
340 reg_attrs attrs;
341 void **slot;
342
343 /* If everything is the default, we can just return zero. */
344 if (decl == 0 && offset == 0)
345 return 0;
346
347 attrs.decl = decl;
348 attrs.offset = offset;
349
350 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
351 if (*slot == 0)
352 {
ba72912a 353 *slot = ggc_alloc_reg_attrs ();
ca74b940 354 memcpy (*slot, &attrs, sizeof (reg_attrs));
355 }
356
2457c754 357 return (reg_attrs *) *slot;
ca74b940 358}
359
3072d30e 360
361#if !HAVE_blockage
362/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
363 across this insn. */
364
365rtx
366gen_blockage (void)
367{
368 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
369 MEM_VOLATILE_P (x) = true;
370 return x;
371}
372#endif
373
374
22cf44bc 375/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
376 don't attempt to share with the various global pieces of rtl (such as
377 frame_pointer_rtx). */
378
379rtx
35cb5232 380gen_raw_REG (enum machine_mode mode, int regno)
22cf44bc 381{
382 rtx x = gen_rtx_raw_REG (mode, regno);
383 ORIGINAL_REGNO (x) = regno;
384 return x;
385}
386
7014838c 387/* There are some RTL codes that require special attention; the generation
388 functions do the raw handling. If you add to this list, modify
389 special_rtx in gengenrtl.c as well. */
390
3ad7bb1c 391rtx
35cb5232 392gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3ad7bb1c 393{
73f5c1e3 394 void **slot;
395
3ad7bb1c 396 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 397 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 398
399#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
400 if (const_true_rtx && arg == STORE_FLAG_VALUE)
401 return const_true_rtx;
402#endif
403
73f5c1e3 404 /* Look up the CONST_INT in the hash table. */
2b3dbc20 405 slot = htab_find_slot_with_hash (const_int_htab, &arg,
406 (hashval_t) arg, INSERT);
7f2875d3 407 if (*slot == 0)
d7c47c0e 408 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 409
410 return (rtx) *slot;
3ad7bb1c 411}
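
/* Editorial usage sketch (not part of the original file): because
   CONST_INTs are interned as described above, pointer equality doubles
   as value equality.  The helper name is hypothetical.  */
static void ATTRIBUTE_UNUSED
const_int_sharing_example (void)
{
  rtx a = gen_rtx_CONST_INT (VOIDmode, 42);
  rtx b = GEN_INT (42);
  gcc_assert (a == b);			/* One shared rtx per value.  */
  gcc_assert (GEN_INT (0) == const0_rtx);
}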
412
2d232d05 413rtx
35cb5232 414gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2d232d05 415{
416 return GEN_INT (trunc_int_for_mode (c, mode));
417}
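
/* Editorial note: gen_int_mode canonicalizes C for MODE, so
   gen_int_mode (0xff, QImode) yields (const_int -1) -- CONST_INTs are
   always stored sign-extended from the width of their mode.  */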
418
2ff23ed0 419/* CONST_DOUBLEs might be created from pairs of integers, or from
420 REAL_VALUE_TYPEs. Also, their length is known only at run time,
421 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
422
423/* Determine whether REAL, a CONST_DOUBLE, already exists in the
424 hash table. If so, return its counterpart; otherwise add it
425 to the hash table and return it. */
426static rtx
35cb5232 427lookup_const_double (rtx real)
2ff23ed0 428{
429 void **slot = htab_find_slot (const_double_htab, real, INSERT);
430 if (*slot == 0)
431 *slot = real;
432
433 return (rtx) *slot;
434}
7f2875d3 435
2ff23ed0 436/* Return a CONST_DOUBLE rtx for a floating-point value specified by
437 VALUE in mode MODE. */
67f2a2eb 438rtx
35cb5232 439const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
67f2a2eb 440{
2ff23ed0 441 rtx real = rtx_alloc (CONST_DOUBLE);
442 PUT_MODE (real, mode);
443
e8aaae4e 444 real->u.rv = value;
2ff23ed0 445
446 return lookup_const_double (real);
447}
448
e397ad8e 449/* Determine whether FIXED, a CONST_FIXED, already exists in the
450 hash table. If so, return its counterpart; otherwise add it
451 to the hash table and return it. */
452
453static rtx
454lookup_const_fixed (rtx fixed)
455{
456 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
457 if (*slot == 0)
458 *slot = fixed;
459
460 return (rtx) *slot;
461}
462
463/* Return a CONST_FIXED rtx for a fixed-point value specified by
464 VALUE in mode MODE. */
465
466rtx
467const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
468{
469 rtx fixed = rtx_alloc (CONST_FIXED);
470 PUT_MODE (fixed, mode);
471
472 fixed->u.fv = value;
473
474 return lookup_const_fixed (fixed);
475}
476
33274180 477/* Constructs double_int from rtx CST. */
478
479double_int
480rtx_to_double_int (const_rtx cst)
481{
482 double_int r;
483
484 if (CONST_INT_P (cst))
485 r = shwi_to_double_int (INTVAL (cst));
486 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
487 {
488 r.low = CONST_DOUBLE_LOW (cst);
489 r.high = CONST_DOUBLE_HIGH (cst);
490 }
491 else
492 gcc_unreachable ();
493
494 return r;
495}
496
497
3e052aec 498/* Return a CONST_DOUBLE or CONST_INT for a value specified as
499 a double_int. */
500
501rtx
502immed_double_int_const (double_int i, enum machine_mode mode)
503{
504 return immed_double_const (i.low, i.high, mode);
505}
506
2ff23ed0 507/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
508 of ints: I0 is the low-order word and I1 is the high-order word.
509 Do not use this routine for non-integer modes; convert to
510 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
511
512rtx
35cb5232 513immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
2ff23ed0 514{
515 rtx value;
516 unsigned int i;
517
b1ca4af4 518 /* There are the following cases (note that there are no modes with
519 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
520
521 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
522 gen_int_mode.
523 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
524 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
48e1416a 525 from copies of the sign bit, and sign of i0 and i1 are the same), then
b1ca4af4 526 we return a CONST_INT for i0.
527 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
2ff23ed0 528 if (mode != VOIDmode)
529 {
611234b4 530 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
531 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
532 /* We can get a 0 for an error mark. */
533 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
534 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
2ff23ed0 535
b1ca4af4 536 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
537 return gen_int_mode (i0, mode);
538
539 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
2ff23ed0 540 }
541
542 /* If this integer fits in one word, return a CONST_INT. */
543 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
544 return GEN_INT (i0);
545
546 /* We use VOIDmode for integers. */
547 value = rtx_alloc (CONST_DOUBLE);
548 PUT_MODE (value, VOIDmode);
549
550 CONST_DOUBLE_LOW (value) = i0;
551 CONST_DOUBLE_HIGH (value) = i1;
552
553 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
554 XWINT (value, i) = 0;
555
556 return lookup_const_double (value);
67f2a2eb 557}
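
/* Editorial sketch (hypothetical helper; assumes a 64-bit
   HOST_WIDE_INT and a target providing TImode) showing how the three
   cases documented in immed_double_const play out.  */
static void ATTRIBUTE_UNUSED
immed_double_const_example (void)
{
  /* Case 1: SImode fits in one HOST_WIDE_INT, so gen_int_mode is used.  */
  rtx a = immed_double_const (-1, -1, SImode);
  /* Case 2: TImode is 2 * HOST_BITS_PER_WIDE_INT wide, but I1 is just
     the sign-extension of I0, so a plain CONST_INT results.  */
  rtx b = immed_double_const (-1, -1, TImode);
  /* Case 3: the high word carries real information.  */
  rtx c = immed_double_const (0, 1, TImode);
  gcc_assert (CONST_INT_P (a) && CONST_INT_P (b) && !CONST_INT_P (c));
}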
558
3ad7bb1c 559rtx
35cb5232 560gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3ad7bb1c 561{
562 /* In case the MD file explicitly references the frame pointer, have
563 all such references point to the same frame pointer. This is
564 used during frame pointer elimination to distinguish the explicit
565 references to these registers from pseudos that happened to be
566 assigned to them.
567
568 If we have eliminated the frame pointer or arg pointer, we will
569 be using it as a normal register, for example as a spill
570 register. In such cases, we might be accessing it in a mode that
571 is not Pmode and therefore cannot use the pre-allocated rtx.
572
573 Also don't do this when we are making new REGs in reload, since
574 we don't want to get confused with the real pointers. */
575
576 if (mode == Pmode && !reload_in_progress)
577 {
71801afc 578 if (regno == FRAME_POINTER_REGNUM
579 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 580 return frame_pointer_rtx;
5ae82d58 581#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
71801afc 582 if (regno == HARD_FRAME_POINTER_REGNUM
583 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 584 return hard_frame_pointer_rtx;
585#endif
5ae82d58 586#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
e8b59353 587 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 588 return arg_pointer_rtx;
589#endif
590#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 591 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 592 return return_address_pointer_rtx;
593#endif
3473aefe 594 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
8d43ad05 595 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
6ea47475 596 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
d4c5e26d 597 return pic_offset_table_rtx;
e8b59353 598 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 599 return stack_pointer_rtx;
600 }
601
32b53d83 602#if 0
90295bd2 603 /* If the per-function register table has been set up, try to re-use
32b53d83 604 an existing entry in that table to avoid useless generation of RTL.
605
606 This code is disabled for now until we can fix the various backends
607 which depend on having non-shared hard registers in some cases. Long
608 term we want to re-enable this code as it can significantly cut down
71801afc 609 on the amount of useless RTL that gets generated.
610
611 We'll also need to fix some code that runs after reload that wants to
612 set ORIGINAL_REGNO. */
613
90295bd2 614 if (cfun
615 && cfun->emit
616 && regno_reg_rtx
617 && regno < FIRST_PSEUDO_REGISTER
618 && reg_raw_mode[regno] == mode)
619 return regno_reg_rtx[regno];
32b53d83 620#endif
90295bd2 621
22cf44bc 622 return gen_raw_REG (mode, regno);
3ad7bb1c 623}
624
b5ba9f3a 625rtx
35cb5232 626gen_rtx_MEM (enum machine_mode mode, rtx addr)
b5ba9f3a 627{
628 rtx rt = gen_rtx_raw_MEM (mode, addr);
629
630 /* This field is not cleared by the mere allocation of the rtx, so
631 we clear it here. */
c6259b83 632 MEM_ATTRS (rt) = 0;
b5ba9f3a 633
634 return rt;
635}
701e46d0 636
e265a6da 637/* Generate a MEM referring to non-trapping constant memory. */
638
639rtx
640gen_const_mem (enum machine_mode mode, rtx addr)
641{
642 rtx mem = gen_rtx_MEM (mode, addr);
643 MEM_READONLY_P (mem) = 1;
644 MEM_NOTRAP_P (mem) = 1;
645 return mem;
646}
647
00060fc2 648/* Generate a MEM referring to fixed portions of the frame, e.g., register
649 save areas. */
650
651rtx
652gen_frame_mem (enum machine_mode mode, rtx addr)
653{
654 rtx mem = gen_rtx_MEM (mode, addr);
655 MEM_NOTRAP_P (mem) = 1;
656 set_mem_alias_set (mem, get_frame_alias_set ());
657 return mem;
658}
659
660/* Generate a MEM referring to a temporary use of the stack, not part
661 of the fixed stack frame. For example, something which is pushed
662 by a target splitter. */
663rtx
664gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
665{
666 rtx mem = gen_rtx_MEM (mode, addr);
667 MEM_NOTRAP_P (mem) = 1;
18d50ae6 668 if (!cfun->calls_alloca)
00060fc2 669 set_mem_alias_set (mem, get_frame_alias_set ());
670 return mem;
671}
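
/* Editorial usage sketch (hypothetical helper): a typical register-save
   slot, assumed to live one word below the frame pointer.  */
static rtx ATTRIBUTE_UNUSED
frame_save_slot_example (void)
{
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx,
			   GEN_INT (-UNITS_PER_WORD));
  return gen_frame_mem (word_mode, addr);
}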
672
2166bbaa 673/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
674 this construct would be valid, and false otherwise. */
675
676bool
677validate_subreg (enum machine_mode omode, enum machine_mode imode,
7ecb5bb2 678 const_rtx reg, unsigned int offset)
701e46d0 679{
2166bbaa 680 unsigned int isize = GET_MODE_SIZE (imode);
681 unsigned int osize = GET_MODE_SIZE (omode);
682
683 /* All subregs must be aligned. */
684 if (offset % osize != 0)
685 return false;
686
687 /* The subreg offset cannot be outside the inner object. */
688 if (offset >= isize)
689 return false;
690
691 /* ??? This should not be here. Temporarily continue to allow word_mode
692 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
693 Generally, backends are doing something sketchy but it'll take time to
694 fix them all. */
695 if (omode == word_mode)
696 ;
697 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
698 is the culprit here, and not the backends. */
699 else if (osize >= UNITS_PER_WORD && isize >= osize)
700 ;
701 /* Allow component subregs of complex and vector. Though given the below
702 extraction rules, it's not always clear what that means. */
703 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
704 && GET_MODE_INNER (imode) == omode)
705 ;
706 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
707 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
708 represent this. It's questionable if this ought to be represented at
709 all -- why can't this all be hidden in post-reload splitters that make
710 arbitrary mode changes to the registers themselves? */
711 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
712 ;
713 /* Subregs involving floating point modes are not allowed to
714 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
715 (subreg:SI (reg:DF) 0) isn't. */
716 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
717 {
718 if (isize != osize)
719 return false;
720 }
701e46d0 721
2166bbaa 722 /* Paradoxical subregs must have offset zero. */
723 if (osize > isize)
724 return offset == 0;
725
726 /* This is a normal subreg. Verify that the offset is representable. */
727
728 /* For hard registers, we already have most of these rules collected in
729 subreg_offset_representable_p. */
730 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
731 {
732 unsigned int regno = REGNO (reg);
733
734#ifdef CANNOT_CHANGE_MODE_CLASS
735 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
736 && GET_MODE_INNER (imode) == omode)
737 ;
738 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
739 return false;
701e46d0 740#endif
2166bbaa 741
742 return subreg_offset_representable_p (regno, imode, offset, omode);
743 }
744
745 /* For pseudo registers, we want most of the same checks. Namely:
746 If the register is no larger than a word, the subreg must be lowpart.
747 If the register is larger than a word, the subreg must be the lowpart
748 of a subword. A subreg does *not* perform arbitrary bit extraction.
749 Given that we've already checked mode/offset alignment, we only have
750 to check subword subregs here. */
751 if (osize < UNITS_PER_WORD)
752 {
753 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
754 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
755 if (offset % UNITS_PER_WORD != low_off)
756 return false;
757 }
758 return true;
759}
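
/* Editorial sketch (hypothetical helper): a few of the rules above in
   action.  */
static void ATTRIBUTE_UNUSED
validate_subreg_example (void)
{
  rtx di_reg = gen_reg_rtx (DImode);
  rtx si_reg = gen_reg_rtx (SImode);

  /* The lowpart of a wider pseudo is always representable.  */
  gcc_assert (validate_subreg (SImode, DImode, di_reg,
			       subreg_lowpart_offset (SImode, DImode)));
  /* Offsets must be multiples of the outer mode's size.  */
  gcc_assert (!validate_subreg (SImode, DImode, di_reg, 1));
  /* Paradoxical subregs are accepted, but only at offset zero.  */
  gcc_assert (validate_subreg (DImode, SImode, si_reg, 0));
}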
760
761rtx
762gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
763{
764 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
2ff23ed0 765 return gen_rtx_raw_SUBREG (mode, reg, offset);
701e46d0 766}
767
c6259b83 768/* Generate a SUBREG representing the least-significant part of REG if MODE
769 is smaller than the mode of REG; otherwise, a paradoxical SUBREG. */
770
701e46d0 771rtx
35cb5232 772gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
701e46d0 773{
774 enum machine_mode inmode;
701e46d0 775
776 inmode = GET_MODE (reg);
777 if (inmode == VOIDmode)
778 inmode = mode;
81802af6 779 return gen_rtx_SUBREG (mode, reg,
780 subreg_lowpart_offset (mode, inmode));
701e46d0 781}
7014838c 782\f
15bbde2b 783
cf9ac040 784/* Create an rtvec and store within it the RTXen passed in the arguments. */
785
15bbde2b 786rtvec
ee582a61 787gen_rtvec (int n, ...)
15bbde2b 788{
cf9ac040 789 int i;
790 rtvec rt_val;
ee582a61 791 va_list p;
15bbde2b 792
ee582a61 793 va_start (p, n);
15bbde2b 794
cf9ac040 795 /* Don't allocate an empty rtvec... */
15bbde2b 796 if (n == 0)
451c8e2f 797 {
798 va_end (p);
799 return NULL_RTVEC;
800 }
15bbde2b 801
cf9ac040 802 rt_val = rtvec_alloc (n);
e5fcd76a 803
15bbde2b 804 for (i = 0; i < n; i++)
cf9ac040 805 rt_val->elem[i] = va_arg (p, rtx);
7ad77798 806
ee582a61 807 va_end (p);
cf9ac040 808 return rt_val;
15bbde2b 809}
810
811rtvec
35cb5232 812gen_rtvec_v (int n, rtx *argp)
15bbde2b 813{
19cb6b50 814 int i;
815 rtvec rt_val;
15bbde2b 816
cf9ac040 817 /* Don't allocate an empty rtvec... */
15bbde2b 818 if (n == 0)
cf9ac040 819 return NULL_RTVEC;
15bbde2b 820
cf9ac040 821 rt_val = rtvec_alloc (n);
15bbde2b 822
823 for (i = 0; i < n; i++)
a4070a91 824 rt_val->elem[i] = *argp++;
15bbde2b 825
826 return rt_val;
827}
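
/* Editorial usage note: rtvecs usually feed vector-operand rtxes, e.g.
   gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1)) builds a
   two-element PARALLEL (set0 and set1 being hypothetical SET rtxes).  */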
828\f
80c70e76 829/* Return the number of bytes between the start of an OUTER_MODE
830 in-memory value and the start of an INNER_MODE in-memory value,
831 given that the former is a lowpart of the latter. It may be a
832 paradoxical lowpart, in which case the offset will be negative
833 on big-endian targets. */
834
835int
836byte_lowpart_offset (enum machine_mode outer_mode,
837 enum machine_mode inner_mode)
838{
839 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
840 return subreg_lowpart_offset (outer_mode, inner_mode);
841 else
842 return -subreg_lowpart_offset (inner_mode, outer_mode);
843}
844\f
15bbde2b 845/* Generate a REG rtx for a new pseudo register of mode MODE.
846 This pseudo is assigned the next sequential register number. */
847
848rtx
35cb5232 849gen_reg_rtx (enum machine_mode mode)
15bbde2b 850{
19cb6b50 851 rtx val;
27a7a23a 852 unsigned int align = GET_MODE_ALIGNMENT (mode);
15bbde2b 853
1b7ff857 854 gcc_assert (can_create_pseudo_p ());
15bbde2b 855
27a7a23a 856 /* If a virtual register with bigger mode alignment is generated,
857 increase stack alignment estimation because it might be spilled
858 to stack later. */
48e1416a 859 if (SUPPORTS_STACK_ALIGNMENT
27a7a23a 860 && crtl->stack_alignment_estimated < align
861 && !crtl->stack_realign_processed)
8645d3e7 862 {
863 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
864 if (crtl->stack_alignment_estimated < min_align)
865 crtl->stack_alignment_estimated = min_align;
866 }
27a7a23a 867
316bc009 868 if (generating_concat_p
869 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
870 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 871 {
872 /* For complex modes, don't make a single pseudo.
873 Instead, make a CONCAT of two pseudos.
874 This allows noncontiguous allocation of the real and imaginary parts,
875 which makes much better code. Besides, allocating DCmode
876 pseudos overstrains reload on some machines like the 386. */
877 rtx realpart, imagpart;
e9e12845 878 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 879
880 realpart = gen_reg_rtx (partmode);
881 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 882 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 883 }
884
ca74b940 885 /* Make sure regno_pointer_align, and regno_reg_rtx are large
fcdc122e 886 enough to have an element for this pseudo reg number. */
15bbde2b 887
fd6ffb7c 888 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
15bbde2b 889 {
fd6ffb7c 890 int old_size = crtl->emit.regno_pointer_align_length;
9ce37fa7 891 char *tmp;
fcdc122e 892 rtx *new1;
fcdc122e 893
9ce37fa7 894 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
895 memset (tmp + old_size, 0, old_size);
896 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
0a893c29 897
2457c754 898 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
0a893c29 899 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 900 regno_reg_rtx = new1;
901
fd6ffb7c 902 crtl->emit.regno_pointer_align_length = old_size * 2;
15bbde2b 903 }
904
22cf44bc 905 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 906 regno_reg_rtx[reg_rtx_no++] = val;
907 return val;
908}
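
/* Editorial note: as described above, gen_reg_rtx (DCmode) does not
   return a single pseudo but (concat:DC (reg:DF) (reg:DF)), one fresh
   pseudo per component, when generating_concat_p is set.  */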
909
80c70e76 910/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
911 to the REG_OFFSET. */
ca74b940 912
1a6a0f2a 913static void
9ce37fa7 914update_reg_offset (rtx new_rtx, rtx reg, int offset)
ca74b940 915{
9ce37fa7 916 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
35cb5232 917 REG_OFFSET (reg) + offset);
1a6a0f2a 918}
919
80c70e76 920/* Generate a register with same attributes as REG, but with OFFSET
921 added to the REG_OFFSET. */
1a6a0f2a 922
923rtx
924gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
925 int offset)
926{
9ce37fa7 927 rtx new_rtx = gen_rtx_REG (mode, regno);
1a6a0f2a 928
9ce37fa7 929 update_reg_offset (new_rtx, reg, offset);
930 return new_rtx;
1a6a0f2a 931}
932
933/* Generate a new pseudo-register with the same attributes as REG, but
80c70e76 934 with OFFSET added to the REG_OFFSET. */
1a6a0f2a 935
936rtx
937gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
938{
9ce37fa7 939 rtx new_rtx = gen_reg_rtx (mode);
1a6a0f2a 940
9ce37fa7 941 update_reg_offset (new_rtx, reg, offset);
942 return new_rtx;
ca74b940 943}
944
80c70e76 945/* Adjust REG in-place so that it has mode MODE. It is assumed that the
946 new register is a (possibly paradoxical) lowpart of the old one. */
ca74b940 947
948void
80c70e76 949adjust_reg_mode (rtx reg, enum machine_mode mode)
ca74b940 950{
80c70e76 951 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
952 PUT_MODE (reg, mode);
953}
954
955/* Copy REG's attributes from X, if X has any attributes. If REG and X
956 have different modes, REG is a (possibly paradoxical) lowpart of X. */
957
958void
959set_reg_attrs_from_value (rtx reg, rtx x)
960{
961 int offset;
962
ac56145e 963 /* Hard registers can be reused for multiple purposes within the same
964 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
965 on them is wrong. */
966 if (HARD_REGISTER_P (reg))
967 return;
968
80c70e76 969 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
ae12ddda 970 if (MEM_P (x))
971 {
da443c27 972 if (MEM_OFFSET_KNOWN_P (x))
973 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
974 MEM_OFFSET (x) + offset);
ae12ddda 975 if (MEM_POINTER (x))
40b93dba 976 mark_reg_pointer (reg, 0);
ae12ddda 977 }
978 else if (REG_P (x))
979 {
980 if (REG_ATTRS (x))
981 update_reg_offset (reg, x, offset);
982 if (REG_POINTER (x))
983 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
984 }
985}
986
987/* Generate a REG rtx for a new pseudo register, copying the mode
988 and attributes from X. */
989
990rtx
991gen_reg_rtx_and_attrs (rtx x)
992{
993 rtx reg = gen_reg_rtx (GET_MODE (x));
994 set_reg_attrs_from_value (reg, x);
995 return reg;
ca74b940 996}
997
263c416c 998/* Set the register attributes for registers contained in PARM_RTX.
999 Use needed values from memory attributes of MEM. */
1000
1001void
35cb5232 1002set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
263c416c 1003{
8ad4c111 1004 if (REG_P (parm_rtx))
80c70e76 1005 set_reg_attrs_from_value (parm_rtx, mem);
263c416c 1006 else if (GET_CODE (parm_rtx) == PARALLEL)
1007 {
1008 /* Check for a NULL entry in the first slot, used to indicate that the
1009 parameter goes both on the stack and in registers. */
1010 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1011 for (; i < XVECLEN (parm_rtx, 0); i++)
1012 {
1013 rtx x = XVECEXP (parm_rtx, 0, i);
8ad4c111 1014 if (REG_P (XEXP (x, 0)))
263c416c 1015 REG_ATTRS (XEXP (x, 0))
1016 = get_reg_attrs (MEM_EXPR (mem),
1017 INTVAL (XEXP (x, 1)));
1018 }
1019 }
1020}
1021
80c70e76 1022/* Set the REG_ATTRS for registers in value X, given that X represents
1023 decl T. */
ca74b940 1024
a8dd994c 1025void
80c70e76 1026set_reg_attrs_for_decl_rtl (tree t, rtx x)
1027{
1028 if (GET_CODE (x) == SUBREG)
ebfc27f5 1029 {
80c70e76 1030 gcc_assert (subreg_lowpart_p (x));
1031 x = SUBREG_REG (x);
ebfc27f5 1032 }
8ad4c111 1033 if (REG_P (x))
80c70e76 1034 REG_ATTRS (x)
1035 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
3ff99faa 1036 DECL_MODE (t)));
ca74b940 1037 if (GET_CODE (x) == CONCAT)
1038 {
1039 if (REG_P (XEXP (x, 0)))
1040 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1041 if (REG_P (XEXP (x, 1)))
1042 REG_ATTRS (XEXP (x, 1))
1043 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1044 }
1045 if (GET_CODE (x) == PARALLEL)
1046 {
85d25060 1047 int i, start;
1048
1049 /* Check for a NULL entry, used to indicate that the parameter goes
1050 both on the stack and in registers. */
1051 if (XEXP (XVECEXP (x, 0, 0), 0))
1052 start = 0;
1053 else
1054 start = 1;
1055
1056 for (i = start; i < XVECLEN (x, 0); i++)
ca74b940 1057 {
1058 rtx y = XVECEXP (x, 0, i);
1059 if (REG_P (XEXP (y, 0)))
1060 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1061 }
1062 }
1063}
1064
80c70e76 1065/* Assign the RTX X to declaration T. */
1066
1067void
1068set_decl_rtl (tree t, rtx x)
1069{
1070 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1071 if (x)
1072 set_reg_attrs_for_decl_rtl (t, x);
1073}
1074
d91cf567 1075/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1076 if the ABI requires the parameter to be passed by reference. */
80c70e76 1077
1078void
d91cf567 1079set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
80c70e76 1080{
1081 DECL_INCOMING_RTL (t) = x;
d91cf567 1082 if (x && !by_reference_p)
80c70e76 1083 set_reg_attrs_for_decl_rtl (t, x);
1084}
1085
de8ecfb5 1086/* Identify REG (which may be a CONCAT) as a user register. */
1087
1088void
35cb5232 1089mark_user_reg (rtx reg)
de8ecfb5 1090{
1091 if (GET_CODE (reg) == CONCAT)
1092 {
1093 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1094 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1095 }
de8ecfb5 1096 else
611234b4 1097 {
1098 gcc_assert (REG_P (reg));
1099 REG_USERVAR_P (reg) = 1;
1100 }
de8ecfb5 1101}
1102
d4c332ff 1103/* Identify REG as a probable pointer register and show its alignment
1104 as ALIGN, if nonzero. */
15bbde2b 1105
1106void
35cb5232 1107mark_reg_pointer (rtx reg, int align)
15bbde2b 1108{
e61a0a7f 1109 if (! REG_POINTER (reg))
612409a6 1110 {
e61a0a7f 1111 REG_POINTER (reg) = 1;
d4c332ff 1112
612409a6 1113 if (align)
1114 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1115 }
1116 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
8b332087 1117 /* We can no longer be sure just how aligned this pointer is. */
d4c332ff 1118 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 1119}
1120
1121/* Return 1 plus the largest pseudo reg number used in the current function. */
1122
1123int
35cb5232 1124max_reg_num (void)
15bbde2b 1125{
1126 return reg_rtx_no;
1127}
1128
1129/* Return 1 + the largest label number used so far in the current function. */
1130
1131int
35cb5232 1132max_label_num (void)
15bbde2b 1133{
15bbde2b 1134 return label_num;
1135}
1136
1137/* Return first label number used in this function (if any were used). */
1138
1139int
35cb5232 1140get_first_label_num (void)
15bbde2b 1141{
1142 return first_label_num;
1143}
4ee9c684 1144
1145/* If the rtx for a label was created during the expansion of a nested
1146 function, then first_label_num won't include this label number.
f0b5f617 1147 Fix this now so that array indices work later. */
4ee9c684 1148
1149void
1150maybe_set_first_label_num (rtx x)
1151{
1152 if (CODE_LABEL_NUMBER (x) < first_label_num)
1153 first_label_num = CODE_LABEL_NUMBER (x);
1154}
15bbde2b 1155\f
1156/* Return a value representing some low-order bits of X, where the number
1157 of low-order bits is given by MODE. Note that no conversion is done
d823ba47 1158 between floating-point and fixed-point values, rather, the bit
15bbde2b 1159 representation is returned.
1160
1161 This function handles the cases in common between gen_lowpart, below,
1162 and two variants in cse.c and combine.c. These are the cases that can
1163 be safely handled at all points in the compilation.
1164
1165 If this is not a case we can handle, return 0. */
1166
1167rtx
35cb5232 1168gen_lowpart_common (enum machine_mode mode, rtx x)
15bbde2b 1169{
701e46d0 1170 int msize = GET_MODE_SIZE (mode);
791172c5 1171 int xsize;
701e46d0 1172 int offset = 0;
791172c5 1173 enum machine_mode innermode;
1174
1175 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1176 so we have to make one up. Yuk. */
1177 innermode = GET_MODE (x);
971ba038 1178 if (CONST_INT_P (x)
6c799a83 1179 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
791172c5 1180 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1181 else if (innermode == VOIDmode)
1182 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
48e1416a 1183
791172c5 1184 xsize = GET_MODE_SIZE (innermode);
1185
611234b4 1186 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
15bbde2b 1187
791172c5 1188 if (innermode == mode)
15bbde2b 1189 return x;
1190
1191 /* MODE must occupy no more words than the mode of X. */
791172c5 1192 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1193 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
15bbde2b 1194 return 0;
1195
9abe1e73 1196 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
cee7491d 1197 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
9abe1e73 1198 return 0;
1199
791172c5 1200 offset = subreg_lowpart_offset (mode, innermode);
15bbde2b 1201
1202 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
64115b39 1203 && (GET_MODE_CLASS (mode) == MODE_INT
1204 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
15bbde2b 1205 {
1206 /* If we are getting the low-order part of something that has been
1207 sign- or zero-extended, we can either just use the object being
1208 extended or make a narrower extension. If we want an even smaller
1209 piece than the size of the object being extended, call ourselves
1210 recursively.
1211
1212 This case is used mostly by combine and cse. */
1213
1214 if (GET_MODE (XEXP (x, 0)) == mode)
1215 return XEXP (x, 0);
791172c5 1216 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
15bbde2b 1217 return gen_lowpart_common (mode, XEXP (x, 0));
791172c5 1218 else if (msize < xsize)
3ad7bb1c 1219 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
15bbde2b 1220 }
8ad4c111 1221 else if (GET_CODE (x) == SUBREG || REG_P (x)
791172c5 1222 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
971ba038 1223 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
791172c5 1224 return simplify_gen_subreg (mode, x, innermode, offset);
4a307dd5 1225
15bbde2b 1226 /* Otherwise, we can't do this. */
1227 return 0;
1228}
1229\f
d56d0ca2 1230rtx
35cb5232 1231gen_highpart (enum machine_mode mode, rtx x)
d56d0ca2 1232{
701e46d0 1233 unsigned int msize = GET_MODE_SIZE (mode);
81802af6 1234 rtx result;
701e46d0 1235
d56d0ca2 1236 /* This case loses if X is a subreg. To catch bugs early,
1237 complain if an invalid MODE is used even in other cases. */
611234b4 1238 gcc_assert (msize <= UNITS_PER_WORD
1239 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
701e46d0 1240
81802af6 1241 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1242 subreg_highpart_offset (mode, GET_MODE (x)));
611234b4 1243 gcc_assert (result);
48e1416a 1244
a8c36ab2 1245 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1246 the target if we have a MEM. gen_highpart must return a valid operand,
1247 emitting code if necessary to do so. */
611234b4 1248 if (MEM_P (result))
1249 {
1250 result = validize_mem (result);
1251 gcc_assert (result);
1252 }
48e1416a 1253
81802af6 1254 return result;
1255}
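
/* Editorial worked example: for a DImode pseudo X on a 32-bit
   !WORDS_BIG_ENDIAN target, gen_highpart (SImode, X) yields
   (subreg:SI (reg:DI X) 4), i.e. the most significant word.  */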
704fcf2b 1256
29d56731 1257/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
704fcf2b 1258 can be a VOIDmode constant. */
1259rtx
35cb5232 1260gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
704fcf2b 1261{
1262 if (GET_MODE (exp) != VOIDmode)
1263 {
611234b4 1264 gcc_assert (GET_MODE (exp) == innermode);
704fcf2b 1265 return gen_highpart (outermode, exp);
1266 }
1267 return simplify_gen_subreg (outermode, exp, innermode,
1268 subreg_highpart_offset (outermode, innermode));
1269}
d4c5e26d 1270
80c70e76 1271/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
10ef59ac 1272
81802af6 1273unsigned int
35cb5232 1274subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
81802af6 1275{
1276 unsigned int offset = 0;
1277 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1278
81802af6 1279 if (difference > 0)
d56d0ca2 1280 {
81802af6 1281 if (WORDS_BIG_ENDIAN)
1282 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1283 if (BYTES_BIG_ENDIAN)
1284 offset += difference % UNITS_PER_WORD;
d56d0ca2 1285 }
701e46d0 1286
81802af6 1287 return offset;
d56d0ca2 1288}
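
/* Editorial worked example: for OUTERMODE = QImode (1 byte) inside
   INNERMODE = DImode (8 bytes), difference = 7.  On a little-endian
   target the lowpart byte sits at offset 0; on a fully big-endian
   target with 4-byte words it sits at (7 / 4) * 4 + 7 % 4 = 7.  */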
64ab453f 1289
81802af6 1290/* Return offset in bytes to get OUTERMODE high part
1291 of the value in mode INNERMODE stored in memory in target format. */
1292unsigned int
35cb5232 1293subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
64ab453f 1294{
1295 unsigned int offset = 0;
1296 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1297
611234b4 1298 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
81802af6 1299
64ab453f 1300 if (difference > 0)
1301 {
81802af6 1302 if (! WORDS_BIG_ENDIAN)
64ab453f 1303 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1304 if (! BYTES_BIG_ENDIAN)
64ab453f 1305 offset += difference % UNITS_PER_WORD;
1306 }
1307
81802af6 1308 return offset;
64ab453f 1309}
d56d0ca2 1310
15bbde2b 1311/* Return 1 iff X, assumed to be a SUBREG,
1312 refers to the least significant part of its containing reg.
1313 If X is not a SUBREG, always return 1 (it is its own low part!). */
1314
1315int
b7bf20db 1316subreg_lowpart_p (const_rtx x)
15bbde2b 1317{
1318 if (GET_CODE (x) != SUBREG)
1319 return 1;
7e14c1bf 1320 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1321 return 0;
15bbde2b 1322
81802af6 1323 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1324 == SUBREG_BYTE (x));
15bbde2b 1325}
b537bfdb 1326
1327/* Return true if X is a paradoxical subreg, false otherwise. */
1328bool
1329paradoxical_subreg_p (const_rtx x)
1330{
1331 if (GET_CODE (x) != SUBREG)
1332 return false;
1333 return (GET_MODE_PRECISION (GET_MODE (x))
1334 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1335}
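
/* Editorial sketch (hypothetical helper): a paradoxical subreg is
   still a lowpart.  */
static void ATTRIBUTE_UNUSED
paradoxical_subreg_example (void)
{
  rtx x = gen_rtx_SUBREG (DImode, gen_reg_rtx (SImode), 0);
  gcc_assert (subreg_lowpart_p (x) && paradoxical_subreg_p (x));
}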
15bbde2b 1336\f
701e46d0 1337/* Return subword OFFSET of operand OP.
1338 The word number, OFFSET, is interpreted as the word number starting
1339 at the low-order address. OFFSET 0 is the low-order word if not
1340 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1341
1342 If we cannot extract the required word, we return zero. Otherwise,
1343 an rtx corresponding to the requested word will be returned.
1344
1345 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1346 reload has completed, a valid address will always be returned. After
1347 reload, if a valid address cannot be returned, we return zero.
1348
1349 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1350 it is the responsibility of the caller.
1351
1352 MODE is the mode of OP in case it is a CONST_INT.
1353
1354 ??? This is still rather broken for some cases. The problem for the
1355 moment is that all callers of this thing provide no 'goal mode' to
1356 tell us to work with. This exists because all callers were written
84e81e84 1357 in a word-based SUBREG world.
1358 In most cases, uses of this function can now be replaced by
1359 simplify_subreg.
1360 */
701e46d0 1361
1362rtx
35cb5232 1363operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
701e46d0 1364{
1365 if (mode == VOIDmode)
1366 mode = GET_MODE (op);
1367
611234b4 1368 gcc_assert (mode != VOIDmode);
701e46d0 1369
6312a35e 1370 /* If OP is narrower than a word, fail. */
701e46d0 1371 if (mode != BLKmode
1372 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1373 return 0;
1374
6312a35e 1375 /* If we want a word outside OP, return zero. */
701e46d0 1376 if (mode != BLKmode
1377 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1378 return const0_rtx;
1379
701e46d0 1380 /* Form a new MEM at the requested address. */
e16ceb8e 1381 if (MEM_P (op))
701e46d0 1382 {
9ce37fa7 1383 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
701e46d0 1384
e4e86ec5 1385 if (! validate_address)
9ce37fa7 1386 return new_rtx;
e4e86ec5 1387
1388 else if (reload_completed)
701e46d0 1389 {
bd1a81f7 1390 if (! strict_memory_address_addr_space_p (word_mode,
1391 XEXP (new_rtx, 0),
1392 MEM_ADDR_SPACE (op)))
e4e86ec5 1393 return 0;
701e46d0 1394 }
e4e86ec5 1395 else
9ce37fa7 1396 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
701e46d0 1397 }
1398
84e81e84 1399 /* The rest can be handled by simplify_subreg. */
1400 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
701e46d0 1401}
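
/* Editorial usage note: a target splitter would typically decompose a
   DImode operand OP into its two word-sized halves as
     rtx lo = operand_subword (op, 0, 1, DImode);
     rtx hi = operand_subword (op, 1, 1, DImode);
   where word 0 is the low-order word unless WORDS_BIG_ENDIAN.  */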
1402
89f18f73 1403/* Similar to `operand_subword', but never return 0. If we can't
1404 extract the required subword, put OP into a register and try again.
1405 The second attempt must succeed. We always validate the address in
1406 this case.
15bbde2b 1407
1408 MODE is the mode of OP, in case it is CONST_INT. */
1409
1410rtx
35cb5232 1411operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
15bbde2b 1412{
701e46d0 1413 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1414
1415 if (result)
1416 return result;
1417
1418 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1419 {
1420 /* If this is a register that cannot be accessed by words, copy it
1421 to a pseudo register. */
8ad4c111 1422 if (REG_P (op))
ac825d29 1423 op = copy_to_reg (op);
1424 else
1425 op = force_reg (mode, op);
1426 }
15bbde2b 1427
701e46d0 1428 result = operand_subword (op, offset, 1, mode);
611234b4 1429 gcc_assert (result);
15bbde2b 1430
1431 return result;
1432}
1433\f
b3ff8d90 1434/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1435 equal, and 0 otherwise. */
1436
1437int
52d07779 1438mem_expr_equal_p (const_tree expr1, const_tree expr2)
b3ff8d90 1439{
1440 if (expr1 == expr2)
1441 return 1;
1442
1443 if (! expr1 || ! expr2)
1444 return 0;
1445
1446 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1447 return 0;
1448
3a443843 1449 return operand_equal_p (expr1, expr2, 0);
b3ff8d90 1450}
1451
ad0a178f 1452/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1453 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1454 -1 if not known. */
1455
1456int
7cfdc2f0 1457get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1458{
1459 tree expr;
1460 unsigned HOST_WIDE_INT offset;
1461
1462 /* This function can't use
da443c27 1463 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
98ab9e8f 1464 || (MAX (MEM_ALIGN (mem),
1465 get_object_alignment (MEM_EXPR (mem), align))
ad0a178f 1466 < align))
1467 return -1;
1468 else
da443c27 1469 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
ad0a178f 1470 for two reasons:
1471 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1472 for <variable>. get_inner_reference doesn't handle it and
1473 even if it did, the alignment in that case needs to be determined
1474 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1475 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1476 isn't sufficiently aligned, the object it is in might be. */
1477 gcc_assert (MEM_P (mem));
1478 expr = MEM_EXPR (mem);
da443c27 1479 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
ad0a178f 1480 return -1;
1481
da443c27 1482 offset = MEM_OFFSET (mem);
ad0a178f 1483 if (DECL_P (expr))
1484 {
1485 if (DECL_ALIGN (expr) < align)
1486 return -1;
1487 }
1488 else if (INDIRECT_REF_P (expr))
1489 {
1490 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1491 return -1;
1492 }
1493 else if (TREE_CODE (expr) == COMPONENT_REF)
1494 {
1495 while (1)
1496 {
1497 tree inner = TREE_OPERAND (expr, 0);
1498 tree field = TREE_OPERAND (expr, 1);
1499 tree byte_offset = component_ref_field_offset (expr);
1500 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1501
1502 if (!byte_offset
1503 || !host_integerp (byte_offset, 1)
1504 || !host_integerp (bit_offset, 1))
1505 return -1;
1506
1507 offset += tree_low_cst (byte_offset, 1);
1508 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1509
1510 if (inner == NULL_TREE)
1511 {
1512 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1513 < (unsigned int) align)
1514 return -1;
1515 break;
1516 }
1517 else if (DECL_P (inner))
1518 {
1519 if (DECL_ALIGN (inner) < align)
1520 return -1;
1521 break;
1522 }
1523 else if (TREE_CODE (inner) != COMPONENT_REF)
1524 return -1;
1525 expr = inner;
1526 }
1527 }
1528 else
1529 return -1;
1530
1531 return offset & ((align / BITS_PER_UNIT) - 1);
1532}
1533
310b57a1 1534/* Given REF (a MEM) and T, either the type of REF or the expression
c6259b83 1535 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1536 if we are making a new object of this type. BITPOS is nonzero if
1537 there is an offset outstanding on T that will be applied later. */
c6259b83 1538
1539void
35cb5232 1540set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1541 HOST_WIDE_INT bitpos)
c6259b83 1542{
6f717f77 1543 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1544 tree type;
d72886b5 1545 struct mem_attrs attrs, *defattrs, *refattrs;
c6259b83 1546
1547 /* It can happen that type_for_mode was given a mode for which there
1548 is no language-level type, in which case it returns NULL, which
1549 we can see here. */
1550 if (t == NULL_TREE)
1551 return;
1552
1553 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1554 if (type == error_mark_node)
1555 return;
c6259b83 1556
c6259b83 1557 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1558 wrong answer, as it assumes that DECL_RTL already has the right alias
1559 info. Callers should not set DECL_RTL until after the call to
1560 set_mem_attributes. */
611234b4 1561 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1562
d72886b5 1563 memset (&attrs, 0, sizeof (attrs));
1564
96216d37 1565 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1566 front-end routine) and use it. */
d72886b5 1567 attrs.alias = get_alias_set (t);
c6259b83 1568
fbc6244b 1569 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
cfefc966 1570 MEM_IN_STRUCT_P (ref)
1571 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
8d350e69 1572 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1573
2a631e19 1574 /* If we are making an object of this type, or if this is a DECL, we know
1575 that it is a scalar if the type is not an aggregate. */
cfefc966 1576 if ((objectp || DECL_P (t))
1577 && ! AGGREGATE_TYPE_P (type)
1578 && TREE_CODE (type) != COMPLEX_TYPE)
c6259b83 1579 MEM_SCALAR_P (ref) = 1;
1580
d8dccfe9 1581 /* Default values from pre-existing memory attributes if present. */
d72886b5 1582 refattrs = MEM_ATTRS (ref);
1583 if (refattrs)
d8dccfe9 1584 {
1585 /* ??? Can this ever happen? Calling this routine on a MEM that
1586 already carries memory attributes should probably be invalid. */
d72886b5 1587 attrs.expr = refattrs->expr;
1588 attrs.offset = refattrs->offset;
1589 attrs.size = refattrs->size;
1590 attrs.align = refattrs->align;
d8dccfe9 1591 }
1592
1593 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1594 else
d8dccfe9 1595 {
d72886b5 1596 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1597 gcc_assert (!defattrs->expr);
1598 gcc_assert (!defattrs->offset);
1599
d8dccfe9 1600 /* Respect mode size. */
d72886b5 1601 attrs.size = defattrs->size;
d8dccfe9 1602 /* ??? Is this really necessary? We probably should always get
1603 the size from the type below. */
1604
1605 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1606 if T is an object, always compute the object alignment below. */
d72886b5 1607 if (TYPE_P (t))
1608 attrs.align = defattrs->align;
1609 else
1610 attrs.align = BITS_PER_UNIT;
d8dccfe9 1611 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1612 e.g. if the type carries an alignment attribute. Should we be
1613 able to simply always use TYPE_ALIGN? */
1614 }
1615
a9d9ab08 1616 /* We can set the alignment from the type if we are making an object,
1617 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
679e0056 1618 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
d72886b5 1619 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1620
182cf5a9 1621 else if (TREE_CODE (t) == MEM_REF)
1622 {
679e0056 1623 tree op0 = TREE_OPERAND (t, 0);
06a807aa 1624 if (TREE_CODE (op0) == ADDR_EXPR
1625 && (DECL_P (TREE_OPERAND (op0, 0))
1626 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
182cf5a9 1627 {
06a807aa 1628 if (DECL_P (TREE_OPERAND (op0, 0)))
d72886b5 1629 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
06a807aa 1630 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1631 {
d72886b5 1632 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
182cf5a9 1633#ifdef CONSTANT_ALIGNMENT
d72886b5 1634 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1635 attrs.align);
182cf5a9 1636#endif
06a807aa 1637 }
1638 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1639 {
1640 unsigned HOST_WIDE_INT ioff
1641 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1642 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
d72886b5 1643 attrs.align = MIN (aoff, attrs.align);
06a807aa 1644 }
182cf5a9 1645 }
1646 else
936dedf3 1647 /* ??? This isn't fully correct; we can't set the alignment from the
1648 type in all cases. */
d72886b5 1649 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
182cf5a9 1650 }
679e0056 1651
559c9389 1652 else if (TREE_CODE (t) == TARGET_MEM_REF)
 1653 /* ??? This isn't fully correct; we can't set the alignment from the
1654 type in all cases. */
d72886b5 1655 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
559c9389 1656
96216d37 1657 /* If the size is known, we can set that. */
1658 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
d72886b5 1659 attrs.size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
96216d37 1660
579bccf9 1661 /* If T is not a type, we may be able to deduce some more information about
1662 the expression. */
1663 if (! TYPE_P (t))
2a631e19 1664 {
ae2dd339 1665 tree base;
698537d1 1666 bool align_computed = false;
b04fab2a 1667
2a631e19 1668 if (TREE_THIS_VOLATILE (t))
1669 MEM_VOLATILE_P (ref) = 1;
c6259b83 1670
3c00f11c 1671 /* Now remove any conversions: they don't change what the underlying
1672 object is. Likewise for SAVE_EXPR. */
72dd6141 1673 while (CONVERT_EXPR_P (t)
3c00f11c 1674 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1675 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1676 t = TREE_OPERAND (t, 0);
1677
73eb0a09 1678 /* Note whether this expression can trap. */
1679 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1680
1681 base = get_base_address (t);
ae2dd339 1682 if (base && DECL_P (base)
1683 && TREE_READONLY (base)
a33a5782 1684 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1685 && !TREE_THIS_VOLATILE (base))
a62dc878 1686 MEM_READONLY_P (ref) = 1;
ae2dd339 1687
2b02580f 1688 /* If this expression uses its parent's alias set, mark it such
1689 that we won't change it. */
1690 if (component_uses_parent_alias_set (t))
5cc193e7 1691 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1692
2a631e19 1693 /* If this is a decl, set the attributes of the MEM from it. */
1694 if (DECL_P (t))
1695 {
d72886b5 1696 attrs.expr = t;
1697 attrs.offset = const0_rtx;
6f717f77 1698 apply_bitpos = bitpos;
d72886b5 1699 attrs.size = (DECL_SIZE_UNIT (t)
1700 && host_integerp (DECL_SIZE_UNIT (t), 1)
1701 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1702 attrs.align = DECL_ALIGN (t);
698537d1 1703 align_computed = true;
2a631e19 1704 }
1705
ecfe4ca9 1706 /* If this is a constant, we know the alignment. */
ce45a448 1707 else if (CONSTANT_CLASS_P (t))
42f6f447 1708 {
d72886b5 1709 attrs.align = TYPE_ALIGN (type);
42f6f447 1710#ifdef CONSTANT_ALIGNMENT
d72886b5 1711 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
42f6f447 1712#endif
698537d1 1713 align_computed = true;
42f6f447 1714 }
b10dbbca 1715
1716 /* If this is a field reference and not a bit-field, record it. */
f0b5f617 1717 /* ??? There is some information that can be gleaned from bit-fields,
b10dbbca 1718 such as the word offset in the structure that might be modified.
1719 But skip it for now. */
1720 else if (TREE_CODE (t) == COMPONENT_REF
1721 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1722 {
d72886b5 1723 attrs.expr = t;
1724 attrs.offset = const0_rtx;
6f717f77 1725 apply_bitpos = bitpos;
b10dbbca 1726 /* ??? Any reason the field size would be different than
1727 the size we got from the type? */
1728 }
1729
1730 /* If this is an array reference, look for an outer field reference. */
1731 else if (TREE_CODE (t) == ARRAY_REF)
1732 {
1733 tree off_tree = size_zero_node;
6b039979 1734 /* We can't modify t, because we use it at the end of the
1735 function. */
1736 tree t2 = t;
b10dbbca 1737
1738 do
1739 {
6b039979 1740 tree index = TREE_OPERAND (t2, 1);
6374121b 1741 tree low_bound = array_ref_low_bound (t2);
1742 tree unit_size = array_ref_element_size (t2);
97f8ce30 1743
1744 /* We assume all arrays have sizes that are a multiple of a byte.
1745 First subtract the lower bound, if any, in the type of the
6374121b 1746 index, then convert to sizetype and multiply by the size of
1747 the array element. */
1748 if (! integer_zerop (low_bound))
faa43f85 1749 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1750 index, low_bound);
97f8ce30 1751
6374121b 1752 off_tree = size_binop (PLUS_EXPR,
535664e3 1753 size_binop (MULT_EXPR,
1754 fold_convert (sizetype,
1755 index),
6374121b 1756 unit_size),
1757 off_tree);
6b039979 1758 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1759 }
6b039979 1760 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1761
6b039979 1762 if (DECL_P (t2))
2d8fe5d0 1763 {
d72886b5 1764 attrs.expr = t2;
1765 attrs.offset = NULL;
2d8fe5d0 1766 if (host_integerp (off_tree, 1))
0318dc09 1767 {
1768 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1769 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
d72886b5 1770 attrs.align = DECL_ALIGN (t2);
1771 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1772 attrs.align = aoff;
698537d1 1773 align_computed = true;
d72886b5 1774 attrs.offset = GEN_INT (ioff);
6f717f77 1775 apply_bitpos = bitpos;
0318dc09 1776 }
2d8fe5d0 1777 }
6b039979 1778 else if (TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1779 {
d72886b5 1780 attrs.expr = t2;
1781 attrs.offset = NULL;
b10dbbca 1782 if (host_integerp (off_tree, 1))
6f717f77 1783 {
d72886b5 1784 attrs.offset = GEN_INT (tree_low_cst (off_tree, 1));
6f717f77 1785 apply_bitpos = bitpos;
1786 }
b10dbbca 1787 /* ??? Any reason the field size would be different than
1788 the size we got from the type? */
1789 }
6d72287b 1790
6d72287b 1791 /* If this is an indirect reference, record it. */
5d9de213 1792 else if (TREE_CODE (t) == MEM_REF)
6d72287b 1793 {
d72886b5 1794 attrs.expr = t;
1795 attrs.offset = const0_rtx;
6d72287b 1796 apply_bitpos = bitpos;
1797 }
2d8fe5d0 1798 }
1799
6d72287b 1800 /* If this is an indirect reference, record it. */
182cf5a9 1801 else if (TREE_CODE (t) == MEM_REF
5d9de213 1802 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1803 {
d72886b5 1804 attrs.expr = t;
1805 attrs.offset = const0_rtx;
6d72287b 1806 apply_bitpos = bitpos;
1807 }
1808
698537d1 1809 if (!align_computed && !INDIRECT_REF_P (t))
1810 {
98ab9e8f 1811 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
d72886b5 1812 attrs.align = MAX (attrs.align, obj_align);
698537d1 1813 }
2a631e19 1814 }
1815
e2e205b3 1816 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1817 bit position offset. Similarly, increase the size of the accessed
1818 object to contain the negative offset. */
6f717f77 1819 if (apply_bitpos)
595f1461 1820 {
d72886b5 1821 attrs.offset = plus_constant (attrs.offset,
1822 -(apply_bitpos / BITS_PER_UNIT));
1823 if (attrs.size)
1824 attrs.size = plus_constant (attrs.size, apply_bitpos / BITS_PER_UNIT);
595f1461 1825 }
6f717f77 1826
2a631e19 1827 /* Now set the attributes we computed above. */
d72886b5 1828 set_mem_attrs (ref, &attrs);
2a631e19 1829
1830 /* If this is already known to be a scalar or aggregate, we are done. */
1831 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
96216d37 1832 return;
1833
2a631e19 1834 /* If it is a reference into an aggregate, this is part of an aggregate.
1835 Otherwise we don't know. */
c6259b83 1836 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1837 || TREE_CODE (t) == ARRAY_RANGE_REF
1838 || TREE_CODE (t) == BIT_FIELD_REF)
1839 MEM_IN_STRUCT_P (ref) = 1;
1840}
1841
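/* Set the attributes of the MEM rtx REF from the tree T, exactly as
   set_mem_attributes_minus_bitpos above but with a bit position of zero. */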
6f717f77 1842void
35cb5232 1843set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1844{
1845 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1846}
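/* A minimal usage sketch (hypothetical caller, not code from this file):

     rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);

   Note the ordering: per the assertion in set_mem_attributes_minus_bitpos,
   DECL_RTL must be set only after this call, so that get_alias_set does not
   consult a half-initialized MEM. */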
1847
c6259b83 1848/* Set the alias set of MEM to SET. */
1849
1850void
32c2fdea 1851set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1852{
d72886b5 1853 struct mem_attrs attrs;
1854
c6259b83 1855 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 1856 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 1857 attrs = *get_mem_attrs (mem);
1858 attrs.alias = set;
1859 set_mem_attrs (mem, &attrs);
bd1a81f7 1860}
1861
1862/* Set the address space of MEM to ADDRSPACE (target-defined). */
1863
1864void
1865set_mem_addr_space (rtx mem, addr_space_t addrspace)
1866{
d72886b5 1867 struct mem_attrs attrs;
1868
1869 attrs = *get_mem_attrs (mem);
1870 attrs.addrspace = addrspace;
1871 set_mem_attrs (mem, &attrs);
c6259b83 1872}
96216d37 1873
1c4512da 1874/* Set the alignment of MEM to ALIGN bits. */
96216d37 1875
1876void
35cb5232 1877set_mem_align (rtx mem, unsigned int align)
96216d37 1878{
d72886b5 1879 struct mem_attrs attrs;
1880
1881 attrs = *get_mem_attrs (mem);
1882 attrs.align = align;
1883 set_mem_attrs (mem, &attrs);
96216d37 1884}
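/* Each of the small setters here follows the same copy-on-write idiom:
   read the shared mem_attrs block, change one field in a local copy, and
   re-install it with set_mem_attrs, which keeps equal attribute blocks
   shared between MEMs. A sketch of the idiom, for a hypothetical new
   attribute field "foo":

     struct mem_attrs attrs;

     attrs = *get_mem_attrs (mem);
     attrs.foo = foo;
     set_mem_attrs (mem, &attrs); */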
278fe152 1885
b10dbbca 1886/* Set the expr for MEM to EXPR. */
278fe152 1887
1888void
35cb5232 1889set_mem_expr (rtx mem, tree expr)
278fe152 1890{
d72886b5 1891 struct mem_attrs attrs;
1892
1893 attrs = *get_mem_attrs (mem);
1894 attrs.expr = expr;
1895 set_mem_attrs (mem, &attrs);
278fe152 1896}
b10dbbca 1897
1898/* Set the offset of MEM to OFFSET. */
1899
1900void
da443c27 1901set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 1902{
d72886b5 1903 struct mem_attrs attrs;
1904
1905 attrs = *get_mem_attrs (mem);
da443c27 1906 attrs.offset = GEN_INT (offset);
1907 set_mem_attrs (mem, &attrs);
1908}
1909
1910/* Clear the offset of MEM. */
1911
1912void
1913clear_mem_offset (rtx mem)
1914{
1915 struct mem_attrs attrs;
1916
1917 attrs = *get_mem_attrs (mem);
1918 attrs.offset = NULL_RTX;
d72886b5 1919 set_mem_attrs (mem, &attrs);
f0500469 1920}
1921
1922/* Set the size of MEM to SIZE. */
1923
1924void
5b2a69fa 1925set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 1926{
d72886b5 1927 struct mem_attrs attrs;
1928
1929 attrs = *get_mem_attrs (mem);
5b2a69fa 1930 attrs.size = GEN_INT (size);
1931 set_mem_attrs (mem, &attrs);
1932}
1933
1934/* Clear the size of MEM. */
1935
1936void
1937clear_mem_size (rtx mem)
1938{
1939 struct mem_attrs attrs;
1940
1941 attrs = *get_mem_attrs (mem);
1942 attrs.size = NULL_RTX;
d72886b5 1943 set_mem_attrs (mem, &attrs);
b10dbbca 1944}
c6259b83 1945\f
96216d37 1946/* Return a memory reference like MEMREF, but with its mode changed to MODE
1947 and its address changed to ADDR. (VOIDmode means don't change the mode.
1948 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1949 returned memory location is required to be valid. The memory
1950 attributes are not changed. */
15bbde2b 1951
96216d37 1952static rtx
35cb5232 1953change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 1954{
bd1a81f7 1955 addr_space_t as;
9ce37fa7 1956 rtx new_rtx;
15bbde2b 1957
611234b4 1958 gcc_assert (MEM_P (memref));
bd1a81f7 1959 as = MEM_ADDR_SPACE (memref);
15bbde2b 1960 if (mode == VOIDmode)
1961 mode = GET_MODE (memref);
1962 if (addr == 0)
1963 addr = XEXP (memref, 0);
3988ef8b 1964 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 1965 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 1966 return memref;
15bbde2b 1967
e4e86ec5 1968 if (validate)
15bbde2b 1969 {
e4e86ec5 1970 if (reload_in_progress || reload_completed)
bd1a81f7 1971 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 1972 else
bd1a81f7 1973 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 1974 }
d823ba47 1975
e8976cd7 1976 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1977 return memref;
1978
9ce37fa7 1979 new_rtx = gen_rtx_MEM (mode, addr);
1980 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1981 return new_rtx;
15bbde2b 1982}
537ffcfc 1983
96216d37 1984/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1985 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 1986
1987rtx
35cb5232 1988change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 1989{
d72886b5 1990 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
9ce37fa7 1991 enum machine_mode mmode = GET_MODE (new_rtx);
d72886b5 1992 struct mem_attrs attrs, *defattrs;
0ab96142 1993
d72886b5 1994 attrs = *get_mem_attrs (memref);
1995 defattrs = mode_mem_attrs[(int) mmode];
1996 attrs.expr = defattrs->expr;
1997 attrs.offset = defattrs->offset;
1998 attrs.size = defattrs->size;
1999 attrs.align = defattrs->align;
6cc60c4d 2000
d28edf0d 2001 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2002 if (new_rtx == memref)
0ab96142 2003 {
d72886b5 2004 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2005 return new_rtx;
0ab96142 2006
9ce37fa7 2007 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2008 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2009 }
d28edf0d 2010
d72886b5 2011 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2012 return new_rtx;
e513d163 2013}
537ffcfc 2014
96216d37 2015/* Return a memory reference like MEMREF, but with its mode changed
2016 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2017 nonzero, the memory address is forced to be valid.
2018 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
 2019 and the caller is responsible for adjusting the MEMREF base register. */
e4e86ec5 2020
2021rtx
35cb5232 2022adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2023 int validate, int adjust)
e4e86ec5 2024{
fb257ae6 2025 rtx addr = XEXP (memref, 0);
9ce37fa7 2026 rtx new_rtx;
d72886b5 2027 enum machine_mode address_mode;
cfb75cdf 2028 int pbits;
d72886b5 2029 struct mem_attrs attrs, *defattrs;
2030 unsigned HOST_WIDE_INT max_align;
2031
2032 attrs = *get_mem_attrs (memref);
fb257ae6 2033
d28edf0d 2034 /* If there are no changes, just return the original memory reference. */
2035 if (mode == GET_MODE (memref) && !offset
d72886b5 2036 && (!validate || memory_address_addr_space_p (mode, addr,
2037 attrs.addrspace)))
d28edf0d 2038 return memref;
2039
e36c3d58 2040 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2041 Sharing may happen even if offset is nonzero -- consider
e36c3d58 2042 (plus (plus reg reg) const_int) -- so do this always. */
2043 addr = copy_rtx (addr);
2044
cfb75cdf 2045 /* Convert a possibly large offset to a signed value within the
2046 range of the target address space. */
d72886b5 2047 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
98155838 2048 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2049 if (HOST_BITS_PER_WIDE_INT > pbits)
2050 {
2051 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2052 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2053 >> shift);
2054 }
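/* For example, with a 64-bit HOST_WIDE_INT and a 32-bit address space,
   SHIFT is 32, so an incoming OFFSET of 0xffffffff becomes
   ((0xffffffff << 32) >> 32) == -1 after the arithmetic right shift;
   i.e. the offset is reduced modulo 2^32 into [-2^31, 2^31). */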
2055
cd358719 2056 if (adjust)
2057 {
2058 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2059 object, we can merge it into the LO_SUM. */
2060 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2061 && offset >= 0
2062 && (unsigned HOST_WIDE_INT) offset
2063 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2064 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
cd358719 2065 plus_constant (XEXP (addr, 1), offset));
2066 else
2067 addr = plus_constant (addr, offset);
2068 }
fb257ae6 2069
9ce37fa7 2070 new_rtx = change_address_1 (memref, mode, addr, validate);
96216d37 2071
e077413c 2072 /* If the address is a REG, change_address_1 rightfully returns memref,
2073 but this would destroy memref's MEM_ATTRS. */
2074 if (new_rtx == memref && offset != 0)
2075 new_rtx = copy_rtx (new_rtx);
2076
96216d37 2077 /* Compute the new values of the memory attributes due to this adjustment.
2078 We add the offsets and update the alignment. */
d72886b5 2079 if (attrs.offset)
2080 attrs.offset = GEN_INT (offset + INTVAL (attrs.offset));
96216d37 2081
b8098e5b 2082 /* Compute the new alignment by taking the MIN of the alignment and the
2083 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2084 is zero. */
2085 if (offset != 0)
d72886b5 2086 {
2087 max_align = (offset & -offset) * BITS_PER_UNIT;
2088 attrs.align = MIN (attrs.align, max_align);
2089 }
96216d37 2090
5cc193e7 2091 /* We can compute the size in a number of ways. */
d72886b5 2092 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2093 if (defattrs->size)
2094 attrs.size = defattrs->size;
2095 else if (attrs.size)
2096 attrs.size = plus_constant (attrs.size, -offset);
5cc193e7 2097
d72886b5 2098 set_mem_attrs (new_rtx, &attrs);
96216d37 2099
2100 /* At some point, we should validate that this offset is within the object,
2101 if all the appropriate values are known. */
9ce37fa7 2102 return new_rtx;
e4e86ec5 2103}
2104
bf42c62d 2105/* Return a memory reference like MEMREF, but with its mode changed
2106 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2107 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2108 nonzero, the memory address is forced to be valid. */
2109
2110rtx
35cb5232 2111adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2112 HOST_WIDE_INT offset, int validate)
bf42c62d 2113{
2114 memref = change_address_1 (memref, VOIDmode, addr, validate);
2115 return adjust_address_1 (memref, mode, offset, validate, 0);
2116}
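/* Callers normally reach the two functions above through the wrapper
   macros in expr.h, e.g. (hypothetical MEM):

     rtx word = gen_rtx_MEM (SImode, addr);
     rtx byte = adjust_address (word, QImode, 3);

   which yields a QImode MEM at ADDR+3; the bookkeeping above reduces the
   recorded alignment to MIN (MEM_ALIGN (word), 8) bits, since an offset
   of 3 guarantees no more than byte alignment. */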
2117
2a631e19 2118/* Return a memory reference like MEMREF, but whose address is changed by
2119 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2120 known to be in OFFSET (possibly 1). */
fcdc122e 2121
2122rtx
35cb5232 2123offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2124{
9ce37fa7 2125 rtx new_rtx, addr = XEXP (memref, 0);
d72886b5 2126 enum machine_mode address_mode;
2127 struct mem_attrs attrs;
fac6aae6 2128
d72886b5 2129 attrs = *get_mem_attrs (memref);
2130 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
98155838 2131 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2132
d4c5e26d 2133 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2134 could have secondary memory references, multiplies or anything.
fac6aae6 2135
2136 However, if we did go and rearrange things, we can wind up not
2137 being able to recognize the magic around pic_offset_table_rtx.
2138 This stuff is fragile, and is yet another example of why it is
2139 bad to expose PIC machinery too early. */
d72886b5 2140 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2141 attrs.addrspace)
fac6aae6 2142 && GET_CODE (addr) == PLUS
2143 && XEXP (addr, 0) == pic_offset_table_rtx)
2144 {
2145 addr = force_reg (GET_MODE (addr), addr);
98155838 2146 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2147 }
2148
9ce37fa7 2149 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2150 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
fcdc122e 2151
d28edf0d 2152 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2153 if (new_rtx == memref)
2154 return new_rtx;
d28edf0d 2155
fcdc122e 2156 /* Update the alignment to reflect the offset. Reset the offset, which
2157 we don't know. */
d72886b5 2158 attrs.offset = 0;
2159 attrs.size = mode_mem_attrs[(int) GET_MODE (new_rtx)]->size;
2160 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2161 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2162 return new_rtx;
fcdc122e 2163}
d4c5e26d 2164
537ffcfc 2165/* Return a memory reference like MEMREF, but with its address changed to
2166 ADDR. The caller is asserting that the actual piece of memory pointed
2167 to is the same, just the form of the address is being changed, such as
2168 by putting something into a register. */
2169
2170rtx
35cb5232 2171replace_equiv_address (rtx memref, rtx addr)
537ffcfc 2172{
96216d37 2173 /* change_address_1 copies the memory attribute structure without change
2174 and that's exactly what we want here. */
ecfe4ca9 2175 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2176 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2177}
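/* A sketch contrasting this with change_address (hypothetical caller):

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);

   Because the same memory is still being referenced, MEM_EXPR, MEM_OFFSET
   and MEM_ALIGN all survive; change_address would instead have reset them
   to the defaults for the mode. */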
96216d37 2178
e4e86ec5 2179/* Likewise, but the reference is not required to be valid. */
2180
2181rtx
35cb5232 2182replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2183{
e4e86ec5 2184 return change_address_1 (memref, VOIDmode, addr, 0);
2185}
8259ab07 2186
2187/* Return a memory reference like MEMREF, but with its mode widened to
2188 MODE and offset by OFFSET. This would be used by targets that e.g.
2189 cannot issue QImode memory operations and have to use SImode memory
2190 operations plus masking logic. */
2191
2192rtx
35cb5232 2193widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2194{
9ce37fa7 2195 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
d72886b5 2196 struct mem_attrs attrs;
8259ab07 2197 unsigned int size = GET_MODE_SIZE (mode);
2198
d28edf0d 2199 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2200 if (new_rtx == memref)
2201 return new_rtx;
d28edf0d 2202
d72886b5 2203 attrs = *get_mem_attrs (new_rtx);
2204
8259ab07 2205 /* If we don't know what offset we were at within the expression, then
2206 we can't know if we've overstepped the bounds. */
d72886b5 2207 if (! attrs.offset)
2208 attrs.expr = NULL_TREE;
8259ab07 2209
d72886b5 2210 while (attrs.expr)
8259ab07 2211 {
d72886b5 2212 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2213 {
d72886b5 2214 tree field = TREE_OPERAND (attrs.expr, 1);
2215 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2216
2217 if (! DECL_SIZE_UNIT (field))
2218 {
d72886b5 2219 attrs.expr = NULL_TREE;
8259ab07 2220 break;
2221 }
2222
2223 /* Is the field at least as large as the access? If so, ok,
2224 otherwise strip back to the containing structure. */
8359cfb4 2225 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2226 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
d72886b5 2227 && INTVAL (attrs.offset) >= 0)
8259ab07 2228 break;
2229
6374121b 2230 if (! host_integerp (offset, 1))
8259ab07 2231 {
d72886b5 2232 attrs.expr = NULL_TREE;
8259ab07 2233 break;
2234 }
2235
d72886b5 2236 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2237 attrs.offset
2238 = (GEN_INT (INTVAL (attrs.offset)
6374121b 2239 + tree_low_cst (offset, 1)
2240 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2241 / BITS_PER_UNIT)));
8259ab07 2242 }
2243 /* Similarly for the decl. */
d72886b5 2244 else if (DECL_P (attrs.expr)
2245 && DECL_SIZE_UNIT (attrs.expr)
2246 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2247 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2248 && (! attrs.offset || INTVAL (attrs.offset) >= 0))
8259ab07 2249 break;
2250 else
2251 {
2252 /* The widened memory access overflows the expression, which means
2253 that it could alias another expression. Zap it. */
d72886b5 2254 attrs.expr = NULL_TREE;
8259ab07 2255 break;
2256 }
2257 }
2258
d72886b5 2259 if (! attrs.expr)
2260 attrs.offset = NULL_RTX;
8259ab07 2261
2262 /* The widened memory may alias other stuff, so zap the alias set. */
2263 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2264 attrs.alias = 0;
2265 attrs.size = GEN_INT (size);
2266 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2267 return new_rtx;
8259ab07 2268}
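/* Usage sketch: a target that can only issue SImode loads may widen a
   QImode reference and mask off the extra bytes (hypothetical MEM):

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The loop above then either proves that the containing field or decl is
   at least GET_MODE_SIZE (SImode) bytes and keeps MEM_EXPR, or drops the
   expr; either way the alias set is cleared, so the widened access
   conservatively aliases everything. */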
15bbde2b 2269\f
ac681e84 2270/* A fake decl that is used as the MEM_EXPR of spill slots. */
2271static GTY(()) tree spill_slot_decl;
2272
58029e61 2273tree
2274get_spill_slot_decl (bool force_build_p)
ac681e84 2275{
2276 tree d = spill_slot_decl;
2277 rtx rd;
d72886b5 2278 struct mem_attrs attrs;
ac681e84 2279
58029e61 2280 if (d || !force_build_p)
ac681e84 2281 return d;
2282
e60a6f7b 2283 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2284 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2285 DECL_ARTIFICIAL (d) = 1;
2286 DECL_IGNORED_P (d) = 1;
2287 TREE_USED (d) = 1;
ac681e84 2288 spill_slot_decl = d;
2289
2290 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2291 MEM_NOTRAP_P (rd) = 1;
d72886b5 2292 attrs = *mode_mem_attrs[(int) BLKmode];
2293 attrs.alias = new_alias_set ();
2294 attrs.expr = d;
2295 set_mem_attrs (rd, &attrs);
ac681e84 2296 SET_DECL_RTL (d, rd);
2297
2298 return d;
2299}
2300
2301/* Given MEM, a result from assign_stack_local, fill in the memory
2302 attributes as appropriate for a register allocator spill slot.
2303 These slots are not aliasable by other memory. We arrange for
2304 them all to use a single MEM_EXPR, so that the aliasing code can
2305 work properly in the case of shared spill slots. */
2306
2307void
2308set_mem_attrs_for_spill (rtx mem)
2309{
d72886b5 2310 struct mem_attrs attrs;
2311 rtx addr;
ac681e84 2312
d72886b5 2313 attrs = *get_mem_attrs (mem);
2314 attrs.expr = get_spill_slot_decl (true);
2315 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2316 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2317
2318 /* We expect the incoming memory to be of the form:
2319 (mem:MODE (plus (reg sfp) (const_int offset)))
2320 with perhaps the plus missing for offset = 0. */
2321 addr = XEXP (mem, 0);
d72886b5 2322 attrs.offset = const0_rtx;
ac681e84 2323 if (GET_CODE (addr) == PLUS
971ba038 2324 && CONST_INT_P (XEXP (addr, 1)))
d72886b5 2325 attrs.offset = XEXP (addr, 1);
ac681e84 2326
d72886b5 2327 set_mem_attrs (mem, &attrs);
ac681e84 2328 MEM_NOTRAP_P (mem) = 1;
2329}
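/* Sketch of the intended flow (hypothetical spill code):

     rtx slot = assign_stack_local (mode, size, align);
     set_mem_attrs_for_spill (slot);

   Afterwards MEM_EXPR (slot) is the shared %sfp decl and MEM_OFFSET is
   the constant frame displacement, which is what lets the alias code
   tell distinct spill slots apart by offset alone. */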
2330\f
15bbde2b 2331/* Return a newly created CODE_LABEL rtx with a unique label number. */
2332
2333rtx
35cb5232 2334gen_label_rtx (void)
15bbde2b 2335{
a7ae1e59 2336 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2337 NULL, label_num++, NULL);
15bbde2b 2338}
2339\f
2340/* For procedure integration. */
2341
15bbde2b 2342/* Install new pointers to the first and last insns in the chain.
d4c332ff 2343 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2344 Used for an inline-procedure after copying the insn chain. */
2345
2346void
35cb5232 2347set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2348{
d4c332ff 2349 rtx insn;
2350
06f9d6ef 2351 set_first_insn (first);
2352 set_last_insn (last);
d4c332ff 2353 cur_insn_uid = 0;
2354
9845d120 2355 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2356 {
2357 int debug_count = 0;
2358
2359 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2360 cur_debug_insn_uid = 0;
2361
2362 for (insn = first; insn; insn = NEXT_INSN (insn))
2363 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2364 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2365 else
2366 {
2367 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2368 if (DEBUG_INSN_P (insn))
2369 debug_count++;
2370 }
2371
2372 if (debug_count)
2373 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2374 else
2375 cur_debug_insn_uid++;
2376 }
2377 else
2378 for (insn = first; insn; insn = NEXT_INSN (insn))
2379 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2380
2381 cur_insn_uid++;
15bbde2b 2382}
15bbde2b 2383\f
d823ba47 2384/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2385 structure. This routine should only be called once. */
15bbde2b 2386
a40c0eeb 2387static void
df329266 2388unshare_all_rtl_1 (rtx insn)
15bbde2b 2389{
2d96a59a 2390 /* Unshare just about everything else. */
1cd4cfea 2391 unshare_all_rtl_in_chain (insn);
d823ba47 2392
15bbde2b 2393 /* Make sure the addresses of stack slots found outside the insn chain
2394 (such as, in DECL_RTL of a variable) are not shared
2395 with the insn chain.
2396
2397 This special care is necessary when the stack slot MEM does not
2398 actually appear in the insn chain. If it does appear, its address
2399 is unshared from all else at that point. */
45733446 2400 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2401}
2402
d823ba47 2403/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2404 structure, again. This is a fairly expensive thing to do so it
2405 should be done sparingly. */
2406
2407void
35cb5232 2408unshare_all_rtl_again (rtx insn)
2d96a59a 2409{
2410 rtx p;
5244079b 2411 tree decl;
2412
2d96a59a 2413 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2414 if (INSN_P (p))
2d96a59a 2415 {
2416 reset_used_flags (PATTERN (p));
2417 reset_used_flags (REG_NOTES (p));
2d96a59a 2418 }
5244079b 2419
01dc9f0c 2420 /* Make sure that virtual stack slots are not shared. */
265be050 2421 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2422
5244079b 2423 /* Make sure that virtual parameters are not shared. */
1767a056 2424 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2425 set_used_flags (DECL_RTL (decl));
5244079b 2426
2427 reset_used_flags (stack_slot_list);
2428
df329266 2429 unshare_all_rtl_1 (insn);
a40c0eeb 2430}
2431
2a1990e9 2432unsigned int
a40c0eeb 2433unshare_all_rtl (void)
2434{
df329266 2435 unshare_all_rtl_1 (get_insns ());
2a1990e9 2436 return 0;
2d96a59a 2437}
2438
20099e35 2439struct rtl_opt_pass pass_unshare_all_rtl =
77fce4cd 2440{
20099e35 2441 {
2442 RTL_PASS,
228967a9 2443 "unshare", /* name */
77fce4cd 2444 NULL, /* gate */
2445 unshare_all_rtl, /* execute */
2446 NULL, /* sub */
2447 NULL, /* next */
2448 0, /* static_pass_number */
0b1615c1 2449 TV_NONE, /* tv_id */
77fce4cd 2450 0, /* properties_required */
2451 0, /* properties_provided */
2452 0, /* properties_destroyed */
2453 0, /* todo_flags_start */
771e2890 2454 TODO_verify_rtl_sharing /* todo_flags_finish */
20099e35 2455 }
77fce4cd 2456};
2457
2458
1cd4cfea 2459/* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2460 Recursively does the same for subexpressions. */
2461
2462static void
2463verify_rtx_sharing (rtx orig, rtx insn)
2464{
2465 rtx x = orig;
2466 int i;
2467 enum rtx_code code;
2468 const char *format_ptr;
2469
2470 if (x == 0)
2471 return;
2472
2473 code = GET_CODE (x);
2474
2475 /* These types may be freely shared. */
2476
2477 switch (code)
2478 {
2479 case REG:
688ff29b 2480 case DEBUG_EXPR:
2481 case VALUE:
1cd4cfea 2482 case CONST_INT:
2483 case CONST_DOUBLE:
e397ad8e 2484 case CONST_FIXED:
1cd4cfea 2485 case CONST_VECTOR:
2486 case SYMBOL_REF:
2487 case LABEL_REF:
2488 case CODE_LABEL:
2489 case PC:
2490 case CC0:
1a860023 2491 case RETURN:
1cd4cfea 2492 case SCRATCH:
c09425a0 2493 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
1cd4cfea 2494 return;
c09425a0 2495 case CLOBBER:
2496 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2497 return;
2498 break;
1cd4cfea 2499
2500 case CONST:
3072d30e 2501 if (shared_const_p (orig))
1cd4cfea 2502 return;
2503 break;
2504
2505 case MEM:
2506 /* A MEM is allowed to be shared if its address is constant. */
2507 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2508 || reload_completed || reload_in_progress)
2509 return;
2510
2511 break;
2512
2513 default:
2514 break;
2515 }
2516
 2517 /* This rtx may not be shared. If it has already been seen,
 2518 report invalid sharing. */
9cee7c3f 2519#ifdef ENABLE_CHECKING
1cd4cfea 2520 if (RTX_FLAG (x, used))
2521 {
0a81f5a0 2522 error ("invalid rtl sharing found in the insn");
1cd4cfea 2523 debug_rtx (insn);
0a81f5a0 2524 error ("shared rtx");
1cd4cfea 2525 debug_rtx (x);
0a81f5a0 2526 internal_error ("internal consistency failure");
1cd4cfea 2527 }
9cee7c3f 2528#endif
2529 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2530
1cd4cfea 2531 RTX_FLAG (x, used) = 1;
2532
8b332087 2533 /* Now scan the subexpressions recursively. */
1cd4cfea 2534
2535 format_ptr = GET_RTX_FORMAT (code);
2536
2537 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2538 {
2539 switch (*format_ptr++)
2540 {
2541 case 'e':
2542 verify_rtx_sharing (XEXP (x, i), insn);
2543 break;
2544
2545 case 'E':
2546 if (XVEC (x, i) != NULL)
2547 {
2548 int j;
2549 int len = XVECLEN (x, i);
2550
2551 for (j = 0; j < len; j++)
2552 {
9cee7c3f 2553 /* We allow sharing of ASM_OPERANDS inside a single
2554 instruction. */
1cd4cfea 2555 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2556 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2557 == ASM_OPERANDS))
1cd4cfea 2558 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2559 else
2560 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2561 }
2562 }
2563 break;
2564 }
2565 }
2566 return;
2567}
2568
c7bf1374 2569/* Go through all the RTL insn bodies and check that there is no unexpected
1cd4cfea 2570 sharing between the subexpressions. */
2571
4b987fac 2572DEBUG_FUNCTION void
1cd4cfea 2573verify_rtl_sharing (void)
2574{
2575 rtx p;
2576
4b366dd3 2577 timevar_push (TV_VERIFY_RTL_SHARING);
2578
1cd4cfea 2579 for (p = get_insns (); p; p = NEXT_INSN (p))
2580 if (INSN_P (p))
2581 {
2582 reset_used_flags (PATTERN (p));
2583 reset_used_flags (REG_NOTES (p));
764f640f 2584 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2585 {
2586 int i;
2587 rtx q, sequence = PATTERN (p);
2588
2589 for (i = 0; i < XVECLEN (sequence, 0); i++)
2590 {
2591 q = XVECEXP (sequence, 0, i);
2592 gcc_assert (INSN_P (q));
2593 reset_used_flags (PATTERN (q));
2594 reset_used_flags (REG_NOTES (q));
764f640f 2595 }
2596 }
1cd4cfea 2597 }
2598
2599 for (p = get_insns (); p; p = NEXT_INSN (p))
2600 if (INSN_P (p))
2601 {
2602 verify_rtx_sharing (PATTERN (p), p);
2603 verify_rtx_sharing (REG_NOTES (p), p);
1cd4cfea 2604 }
4b366dd3 2605
2606 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2607}
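/* Passes normally request this check via TODO_verify_rtl_sharing in their
   todo_flags_finish (as pass_unshare_all_rtl does above) rather than
   calling it directly. The protocol mirrors unsharing: clear the USED
   bits on every pattern and note, then re-walk marking each rtx and fail
   on any second visit. */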
2608
2d96a59a 2609/* Go through all the RTL insn bodies and copy any invalid shared structure.
2610 Assumes the mark bits are cleared at entry. */
2611
1cd4cfea 2612void
2613unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2614{
2615 for (; insn; insn = NEXT_INSN (insn))
9204e736 2616 if (INSN_P (insn))
2d96a59a 2617 {
2618 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2619 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2d96a59a 2620 }
2621}
2622
01dc9f0c 2623/* Go through all virtual stack slots of a function and mark them as
265be050 2624 shared. We never replace the DECL_RTLs themselves with a copy,
2625 but expressions mentioned into a DECL_RTL cannot be shared with
2626 expressions in the instruction stream.
2627
2628 Note that reload may convert pseudo registers into memories in-place.
2629 Pseudo registers are always shared, but MEMs never are. Thus if we
2630 reset the used flags on MEMs in the instruction stream, we must set
2631 them again on MEMs that appear in DECL_RTLs. */
2632
01dc9f0c 2633static void
265be050 2634set_used_decls (tree blk)
01dc9f0c 2635{
2636 tree t;
2637
2638 /* Mark decls. */
1767a056 2639 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2640 if (DECL_RTL_SET_P (t))
265be050 2641 set_used_flags (DECL_RTL (t));
01dc9f0c 2642
2643 /* Now process sub-blocks. */
93110716 2644 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2645 set_used_decls (t);
01dc9f0c 2646}
2647
15bbde2b 2648/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2649 Recursively does the same for subexpressions. Uses
2650 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2651
2652rtx
35cb5232 2653copy_rtx_if_shared (rtx orig)
15bbde2b 2654{
0e0727c4 2655 copy_rtx_if_shared_1 (&orig);
2656 return orig;
2657}
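/* The USED-bit protocol in a nutshell: first clear the bits with
   reset_used_flags (below), then walk with copy_rtx_if_shared; the first
   visit to an rtx sets its USED bit and any later visit replaces the
   reference with a fresh copy. Per insn this is:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   which mirrors what unshare_all_rtl_again does for a whole chain. */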
2658
7ba6ce7a 2659/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2660 use. Recursively does the same for subexpressions. */
2661
0e0727c4 2662static void
2663copy_rtx_if_shared_1 (rtx *orig1)
2664{
2665 rtx x;
19cb6b50 2666 int i;
2667 enum rtx_code code;
0e0727c4 2668 rtx *last_ptr;
19cb6b50 2669 const char *format_ptr;
15bbde2b 2670 int copied = 0;
0e0727c4 2671 int length;
2672
2673 /* Repeat is used to turn tail-recursion into iteration. */
2674repeat:
2675 x = *orig1;
15bbde2b 2676
2677 if (x == 0)
0e0727c4 2678 return;
15bbde2b 2679
2680 code = GET_CODE (x);
2681
2682 /* These types may be freely shared. */
2683
2684 switch (code)
2685 {
2686 case REG:
688ff29b 2687 case DEBUG_EXPR:
2688 case VALUE:
15bbde2b 2689 case CONST_INT:
2690 case CONST_DOUBLE:
e397ad8e 2691 case CONST_FIXED:
886cfd4f 2692 case CONST_VECTOR:
15bbde2b 2693 case SYMBOL_REF:
1cd4cfea 2694 case LABEL_REF:
15bbde2b 2695 case CODE_LABEL:
2696 case PC:
2697 case CC0:
2698 case SCRATCH:
a92771b8 2699 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
0e0727c4 2700 return;
c09425a0 2701 case CLOBBER:
2702 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2703 return;
2704 break;
15bbde2b 2705
f63d12e3 2706 case CONST:
3072d30e 2707 if (shared_const_p (x))
0e0727c4 2708 return;
f63d12e3 2709 break;
2710
9845d120 2711 case DEBUG_INSN:
15bbde2b 2712 case INSN:
2713 case JUMP_INSN:
2714 case CALL_INSN:
2715 case NOTE:
15bbde2b 2716 case BARRIER:
2717 /* The chain of insns is not being copied. */
0e0727c4 2718 return;
15bbde2b 2719
0dbd1c74 2720 default:
2721 break;
15bbde2b 2722 }
2723
2724 /* This rtx may not be shared. If it has already been seen,
2725 replace it with a copy of itself. */
2726
7c25cb91 2727 if (RTX_FLAG (x, used))
15bbde2b 2728 {
f2d0e9f1 2729 x = shallow_copy_rtx (x);
15bbde2b 2730 copied = 1;
2731 }
7c25cb91 2732 RTX_FLAG (x, used) = 1;
15bbde2b 2733
2734 /* Now scan the subexpressions recursively.
2735 We can store any replaced subexpressions directly into X
2736 since we know X is not shared! Any vectors in X
2737 must be copied if X was copied. */
2738
2739 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2740 length = GET_RTX_LENGTH (code);
2741 last_ptr = NULL;
48e1416a 2742
0e0727c4 2743 for (i = 0; i < length; i++)
15bbde2b 2744 {
2745 switch (*format_ptr++)
2746 {
2747 case 'e':
0e0727c4 2748 if (last_ptr)
2749 copy_rtx_if_shared_1 (last_ptr);
2750 last_ptr = &XEXP (x, i);
15bbde2b 2751 break;
2752
2753 case 'E':
2754 if (XVEC (x, i) != NULL)
2755 {
19cb6b50 2756 int j;
ffe0869b 2757 int len = XVECLEN (x, i);
48e1416a 2758
8b332087 2759 /* Copy the vector iff we copied the rtx and the length
2760 is nonzero. */
ffe0869b 2761 if (copied && len > 0)
a4070a91 2762 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 2763
d632b59a 2764 /* Call recursively on all inside the vector. */
ffe0869b 2765 for (j = 0; j < len; j++)
0e0727c4 2766 {
2767 if (last_ptr)
2768 copy_rtx_if_shared_1 (last_ptr);
2769 last_ptr = &XVECEXP (x, i, j);
2770 }
15bbde2b 2771 }
2772 break;
2773 }
2774 }
0e0727c4 2775 *orig1 = x;
2776 if (last_ptr)
2777 {
2778 orig1 = last_ptr;
2779 goto repeat;
2780 }
2781 return;
15bbde2b 2782}
2783
709947e6 2784/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 2785
709947e6 2786static void
2787mark_used_flags (rtx x, int flag)
15bbde2b 2788{
19cb6b50 2789 int i, j;
2790 enum rtx_code code;
2791 const char *format_ptr;
0e0727c4 2792 int length;
15bbde2b 2793
0e0727c4 2794 /* Repeat is used to turn tail-recursion into iteration. */
2795repeat:
15bbde2b 2796 if (x == 0)
2797 return;
2798
2799 code = GET_CODE (x);
2800
c3418f42 2801 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2802 for them. */
2803
2804 switch (code)
2805 {
2806 case REG:
688ff29b 2807 case DEBUG_EXPR:
2808 case VALUE:
15bbde2b 2809 case CONST_INT:
2810 case CONST_DOUBLE:
e397ad8e 2811 case CONST_FIXED:
886cfd4f 2812 case CONST_VECTOR:
15bbde2b 2813 case SYMBOL_REF:
2814 case CODE_LABEL:
2815 case PC:
2816 case CC0:
2817 return;
2818
9845d120 2819 case DEBUG_INSN:
15bbde2b 2820 case INSN:
2821 case JUMP_INSN:
2822 case CALL_INSN:
2823 case NOTE:
2824 case LABEL_REF:
2825 case BARRIER:
2826 /* The chain of insns is not being copied. */
2827 return;
d823ba47 2828
0dbd1c74 2829 default:
2830 break;
15bbde2b 2831 }
2832
709947e6 2833 RTX_FLAG (x, used) = flag;
15bbde2b 2834
2835 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2836 length = GET_RTX_LENGTH (code);
48e1416a 2837
0e0727c4 2838 for (i = 0; i < length; i++)
15bbde2b 2839 {
2840 switch (*format_ptr++)
2841 {
2842 case 'e':
0e0727c4 2843 if (i == length-1)
2844 {
2845 x = XEXP (x, i);
2846 goto repeat;
2847 }
709947e6 2848 mark_used_flags (XEXP (x, i), flag);
15bbde2b 2849 break;
2850
2851 case 'E':
2852 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 2853 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 2854 break;
2855 }
2856 }
2857}
1cd4cfea 2858
709947e6 2859/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 2860 to look for shared sub-parts. */
2861
2862void
709947e6 2863reset_used_flags (rtx x)
1cd4cfea 2864{
709947e6 2865 mark_used_flags (x, 0);
2866}
1cd4cfea 2867
709947e6 2868/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2869 to look for shared sub-parts. */
1cd4cfea 2870
709947e6 2871void
2872set_used_flags (rtx x)
2873{
2874 mark_used_flags (x, 1);
1cd4cfea 2875}
15bbde2b 2876\f
2877/* Copy X if necessary so that it won't be altered by changes in OTHER.
2878 Return X or the rtx for the pseudo reg the value of X was copied into.
2879 OTHER must be valid as a SET_DEST. */
2880
2881rtx
35cb5232 2882make_safe_from (rtx x, rtx other)
15bbde2b 2883{
2884 while (1)
2885 switch (GET_CODE (other))
2886 {
2887 case SUBREG:
2888 other = SUBREG_REG (other);
2889 break;
2890 case STRICT_LOW_PART:
2891 case SIGN_EXTEND:
2892 case ZERO_EXTEND:
2893 other = XEXP (other, 0);
2894 break;
2895 default:
2896 goto done;
2897 }
2898 done:
e16ceb8e 2899 if ((MEM_P (other)
15bbde2b 2900 && ! CONSTANT_P (x)
8ad4c111 2901 && !REG_P (x)
15bbde2b 2902 && GET_CODE (x) != SUBREG)
8ad4c111 2903 || (REG_P (other)
15bbde2b 2904 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2905 || reg_mentioned_p (other, x))))
2906 {
2907 rtx temp = gen_reg_rtx (GET_MODE (x));
2908 emit_move_insn (temp, x);
2909 return temp;
2910 }
2911 return x;
2912}
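/* Usage sketch (hypothetical operands): before storing into OTHER, a
   caller can protect a value X that may depend on it:

     x = make_safe_from (x, other);
     emit_move_insn (other, some_value);
     ... X is still usable here ...

   If OTHER could be altered in a way that changes the value of X (a MEM,
   a hard register, or a pseudo mentioned in X), X is first copied into a
   fresh pseudo; otherwise it is returned unchanged. */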
2913\f
2914/* Emission of insns (adding them to the doubly-linked list). */
2915
15bbde2b 2916/* Return the last insn emitted, even if it is in a sequence now pushed. */
2917
2918rtx
35cb5232 2919get_last_insn_anywhere (void)
15bbde2b 2920{
2921 struct sequence_stack *stack;
06f9d6ef 2922 if (get_last_insn ())
2923 return get_last_insn ();
0a893c29 2924 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2925 if (stack->last != 0)
2926 return stack->last;
2927 return 0;
2928}
2929
70545de4 2930/* Return the first nonnote insn emitted in current sequence or current
2931 function. This routine looks inside SEQUENCEs. */
2932
2933rtx
35cb5232 2934get_first_nonnote_insn (void)
70545de4 2935{
06f9d6ef 2936 rtx insn = get_insns ();
f86e856e 2937
2938 if (insn)
2939 {
2940 if (NOTE_P (insn))
2941 for (insn = next_insn (insn);
2942 insn && NOTE_P (insn);
2943 insn = next_insn (insn))
2944 continue;
2945 else
2946 {
1c14a50e 2947 if (NONJUMP_INSN_P (insn)
f86e856e 2948 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2949 insn = XVECEXP (PATTERN (insn), 0, 0);
2950 }
2951 }
70545de4 2952
2953 return insn;
2954}
2955
 2956/* Return the last nonnote insn emitted in the current sequence or current
2957 function. This routine looks inside SEQUENCEs. */
2958
2959rtx
35cb5232 2960get_last_nonnote_insn (void)
70545de4 2961{
06f9d6ef 2962 rtx insn = get_last_insn ();
f86e856e 2963
2964 if (insn)
2965 {
2966 if (NOTE_P (insn))
2967 for (insn = previous_insn (insn);
2968 insn && NOTE_P (insn);
2969 insn = previous_insn (insn))
2970 continue;
2971 else
2972 {
1c14a50e 2973 if (NONJUMP_INSN_P (insn)
f86e856e 2974 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2975 insn = XVECEXP (PATTERN (insn), 0,
2976 XVECLEN (PATTERN (insn), 0) - 1);
2977 }
2978 }
70545de4 2979
2980 return insn;
2981}
2982
9845d120 2983/* Return the number of actual (non-debug) insns emitted in this
2984 function. */
2985
2986int
2987get_max_insn_count (void)
2988{
2989 int n = cur_insn_uid;
2990
2991 /* The table size must be stable across -g, to avoid codegen
2992 differences due to debug insns, and not be affected by
2993 -fmin-insn-uid, to avoid excessive table size and to simplify
2994 debugging of -fcompare-debug failures. */
2995 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2996 n -= cur_debug_insn_uid;
2997 else
2998 n -= MIN_NONDEBUG_INSN_UID;
2999
3000 return n;
3001}
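/* Worked example (illustrative numbers only): with
   MIN_NONDEBUG_INSN_UID == 128, cur_insn_uid == 200 and
   cur_debug_insn_uid == 130, the result is 200 - 130 = 70; with no debug
   insns ever created it is 200 - 128 = 72. Subtracting the debug UIDs is
   what keeps the count stable between -g and non--g compilations. */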
3002
15bbde2b 3003\f
3004/* Return the next insn. If it is a SEQUENCE, return the first insn
3005 of the sequence. */
3006
3007rtx
35cb5232 3008next_insn (rtx insn)
15bbde2b 3009{
ce4469fa 3010 if (insn)
3011 {
3012 insn = NEXT_INSN (insn);
3013 if (insn && NONJUMP_INSN_P (insn)
3014 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3015 insn = XVECEXP (PATTERN (insn), 0, 0);
3016 }
15bbde2b 3017
ce4469fa 3018 return insn;
15bbde2b 3019}
3020
3021/* Return the previous insn. If it is a SEQUENCE, return the last insn
3022 of the sequence. */
3023
3024rtx
35cb5232 3025previous_insn (rtx insn)
15bbde2b 3026{
ce4469fa 3027 if (insn)
3028 {
3029 insn = PREV_INSN (insn);
3030 if (insn && NONJUMP_INSN_P (insn)
3031 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3032 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3033 }
15bbde2b 3034
ce4469fa 3035 return insn;
15bbde2b 3036}
3037
3038/* Return the next insn after INSN that is not a NOTE. This routine does not
3039 look inside SEQUENCEs. */
3040
3041rtx
35cb5232 3042next_nonnote_insn (rtx insn)
15bbde2b 3043{
ce4469fa 3044 while (insn)
3045 {
3046 insn = NEXT_INSN (insn);
3047 if (insn == 0 || !NOTE_P (insn))
3048 break;
3049 }
15bbde2b 3050
ce4469fa 3051 return insn;
15bbde2b 3052}
3053
c4d13c5c 3054/* Return the next insn after INSN that is not a NOTE, but stop the
3055 search before we enter another basic block. This routine does not
3056 look inside SEQUENCEs. */
3057
3058rtx
3059next_nonnote_insn_bb (rtx insn)
3060{
3061 while (insn)
3062 {
3063 insn = NEXT_INSN (insn);
3064 if (insn == 0 || !NOTE_P (insn))
3065 break;
3066 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3067 return NULL_RTX;
3068 }
3069
3070 return insn;
3071}
3072
15bbde2b 3073/* Return the previous insn before INSN that is not a NOTE. This routine does
3074 not look inside SEQUENCEs. */
3075
3076rtx
35cb5232 3077prev_nonnote_insn (rtx insn)
15bbde2b 3078{
ce4469fa 3079 while (insn)
3080 {
3081 insn = PREV_INSN (insn);
3082 if (insn == 0 || !NOTE_P (insn))
3083 break;
3084 }
15bbde2b 3085
ce4469fa 3086 return insn;
15bbde2b 3087}
3088
bcc66782 3089/* Return the previous insn before INSN that is not a NOTE, but stop
3090 the search before we enter another basic block. This routine does
3091 not look inside SEQUENCEs. */
3092
3093rtx
3094prev_nonnote_insn_bb (rtx insn)
3095{
3096 while (insn)
3097 {
3098 insn = PREV_INSN (insn);
3099 if (insn == 0 || !NOTE_P (insn))
3100 break;
3101 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3102 return NULL_RTX;
3103 }
3104
3105 return insn;
3106}
3107
9845d120 3108/* Return the next insn after INSN that is not a DEBUG_INSN. This
3109 routine does not look inside SEQUENCEs. */
3110
3111rtx
3112next_nondebug_insn (rtx insn)
3113{
3114 while (insn)
3115 {
3116 insn = NEXT_INSN (insn);
3117 if (insn == 0 || !DEBUG_INSN_P (insn))
3118 break;
3119 }
3120
3121 return insn;
3122}
3123
3124/* Return the previous insn before INSN that is not a DEBUG_INSN.
3125 This routine does not look inside SEQUENCEs. */
3126
3127rtx
3128prev_nondebug_insn (rtx insn)
3129{
3130 while (insn)
3131 {
3132 insn = PREV_INSN (insn);
3133 if (insn == 0 || !DEBUG_INSN_P (insn))
3134 break;
3135 }
3136
3137 return insn;
3138}
3139
5b8537a8 3140/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3141 This routine does not look inside SEQUENCEs. */
3142
3143rtx
3144next_nonnote_nondebug_insn (rtx insn)
3145{
3146 while (insn)
3147 {
3148 insn = NEXT_INSN (insn);
3149 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3150 break;
3151 }
3152
3153 return insn;
3154}
3155
3156/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3157 This routine does not look inside SEQUENCEs. */
3158
3159rtx
3160prev_nonnote_nondebug_insn (rtx insn)
3161{
3162 while (insn)
3163 {
3164 insn = PREV_INSN (insn);
3165 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3166 break;
3167 }
3168
3169 return insn;
3170}
3171
15bbde2b 3172/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3173 or 0, if there is none. This routine does not look inside
a92771b8 3174 SEQUENCEs. */
15bbde2b 3175
3176rtx
35cb5232 3177next_real_insn (rtx insn)
15bbde2b 3178{
ce4469fa 3179 while (insn)
3180 {
3181 insn = NEXT_INSN (insn);
3182 if (insn == 0 || INSN_P (insn))
3183 break;
3184 }
15bbde2b 3185
ce4469fa 3186 return insn;
15bbde2b 3187}
3188
3189/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3190 or 0, if there is none. This routine does not look inside
3191 SEQUENCEs. */
3192
3193rtx
35cb5232 3194prev_real_insn (rtx insn)
15bbde2b 3195{
ce4469fa 3196 while (insn)
3197 {
3198 insn = PREV_INSN (insn);
3199 if (insn == 0 || INSN_P (insn))
3200 break;
3201 }
15bbde2b 3202
ce4469fa 3203 return insn;
15bbde2b 3204}
3205
d5f9786f 3206/* Return the last CALL_INSN in the current list, or 0 if there is none.
3207 This routine does not look inside SEQUENCEs. */
3208
3209rtx
35cb5232 3210last_call_insn (void)
d5f9786f 3211{
3212 rtx insn;
3213
3214 for (insn = get_last_insn ();
6d7dc5b9 3215 insn && !CALL_P (insn);
d5f9786f 3216 insn = PREV_INSN (insn))
3217 ;
3218
3219 return insn;
3220}
3221
15bbde2b 3222/* Find the next insn after INSN that really does something. This routine
084950ee 3223 does not look inside SEQUENCEs. After reload this also skips over
 3224 standalone USE and CLOBBER insns. */
15bbde2b 3225
2215ca0d 3226int
52d07779 3227active_insn_p (const_rtx insn)
2215ca0d 3228{
6d7dc5b9 3229 return (CALL_P (insn) || JUMP_P (insn)
3230 || (NONJUMP_INSN_P (insn)
3a66feab 3231 && (! reload_completed
3232 || (GET_CODE (PATTERN (insn)) != USE
3233 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3234}
3235
15bbde2b 3236rtx
35cb5232 3237next_active_insn (rtx insn)
15bbde2b 3238{
ce4469fa 3239 while (insn)
3240 {
3241 insn = NEXT_INSN (insn);
3242 if (insn == 0 || active_insn_p (insn))
3243 break;
3244 }
15bbde2b 3245
ce4469fa 3246 return insn;
15bbde2b 3247}
3248
3249/* Find the last insn before INSN that really does something. This routine
084950ee 3250 does not look inside SEQUENCEs. After reload this also skips over
 3251 standalone USE and CLOBBER insns. */
15bbde2b 3252
3253rtx
35cb5232 3254prev_active_insn (rtx insn)
15bbde2b 3255{
ce4469fa 3256 while (insn)
3257 {
3258 insn = PREV_INSN (insn);
3259 if (insn == 0 || active_insn_p (insn))
3260 break;
3261 }
15bbde2b 3262
ce4469fa 3263 return insn;
15bbde2b 3264}
3265
3266/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3267
3268rtx
35cb5232 3269next_label (rtx insn)
15bbde2b 3270{
ce4469fa 3271 while (insn)
3272 {
3273 insn = NEXT_INSN (insn);
3274 if (insn == 0 || LABEL_P (insn))
3275 break;
3276 }
15bbde2b 3277
ce4469fa 3278 return insn;
15bbde2b 3279}
3280
3281/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3282
3283rtx
35cb5232 3284prev_label (rtx insn)
15bbde2b 3285{
ce4469fa 3286 while (insn)
3287 {
3288 insn = PREV_INSN (insn);
3289 if (insn == 0 || LABEL_P (insn))
3290 break;
3291 }
15bbde2b 3292
ce4469fa 3293 return insn;
15bbde2b 3294}
67c5e2a9 3295
3296/* Return the last label to mark the same position as LABEL. Return null
3297 if LABEL itself is null. */
3298
3299rtx
3300skip_consecutive_labels (rtx label)
3301{
3302 rtx insn;
3303
3304 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3305 if (LABEL_P (insn))
3306 label = insn;
3307
3308 return label;
3309}
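/* A typical scan using the iterators above (hypothetical pass code):

     rtx insn;

     for (insn = get_insns (); insn;
          insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         ... process only real instructions ...

   Every iterator in this family returns 0 at the end of the chain, so no
   separate sentinel is needed. */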
15bbde2b 3310\f
3311#ifdef HAVE_cc0
b15e0bba 3312/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3313 and REG_CC_USER notes so we can find it. */
3314
3315void
35cb5232 3316link_cc0_insns (rtx insn)
b15e0bba 3317{
3318 rtx user = next_nonnote_insn (insn);
3319
6d7dc5b9 3320 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
b15e0bba 3321 user = XVECEXP (PATTERN (user), 0, 0);
3322
a1ddb869 3323 add_reg_note (user, REG_CC_SETTER, insn);
3324 add_reg_note (insn, REG_CC_USER, user);
b15e0bba 3325}
3326
15bbde2b 3327/* Return the next insn that uses CC0 after INSN, which is assumed to
3328 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3329 applied to the result of this function should yield INSN).
3330
3331 Normally, this is simply the next insn. However, if a REG_CC_USER note
3332 is present, it contains the insn that uses CC0.
3333
3334 Return 0 if we can't find the insn. */
3335
3336rtx
35cb5232 3337next_cc0_user (rtx insn)
15bbde2b 3338{
b572011e 3339 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3340
3341 if (note)
3342 return XEXP (note, 0);
3343
3344 insn = next_nonnote_insn (insn);
6d7dc5b9 3345 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3346 insn = XVECEXP (PATTERN (insn), 0, 0);
3347
9204e736 3348 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3349 return insn;
3350
3351 return 0;
3352}
3353
3354/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3355 note, it is the previous insn. */
3356
3357rtx
35cb5232 3358prev_cc0_setter (rtx insn)
15bbde2b 3359{
b572011e 3360 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3361
3362 if (note)
3363 return XEXP (note, 0);
3364
3365 insn = prev_nonnote_insn (insn);
611234b4 3366 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3367
3368 return insn;
3369}
3370#endif
344dc2fa 3371
698ff1f0 3372#ifdef AUTO_INC_DEC
3373/* Find a RTX_AUTOINC class rtx which matches DATA. */
3374
3375static int
3376find_auto_inc (rtx *xp, void *data)
3377{
3378 rtx x = *xp;
225ab426 3379 rtx reg = (rtx) data;
698ff1f0 3380
3381 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3382 return 0;
3383
3384 switch (GET_CODE (x))
3385 {
3386 case PRE_DEC:
3387 case PRE_INC:
3388 case POST_DEC:
3389 case POST_INC:
3390 case PRE_MODIFY:
3391 case POST_MODIFY:
3392 if (rtx_equal_p (reg, XEXP (x, 0)))
3393 return 1;
3394 break;
3395
3396 default:
3397 gcc_unreachable ();
3398 }
3399 return -1;
3400}
3401#endif
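/* find_auto_inc follows the for_each_rtx callback protocol: return 0 to
   continue the walk, a positive value to stop and propagate it, or -1 to
   skip the sub-rtxes of the current rtx. Its caller in try_split below
   therefore tests:

     if (for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
       ... PATTERN (insn) auto-increments REG ...  */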
3402
344dc2fa 3403/* Increment LABEL_NUSES for all labels present in X. */
3404
3405static void
35cb5232 3406mark_label_nuses (rtx x)
344dc2fa 3407{
19cb6b50 3408 enum rtx_code code;
3409 int i, j;
3410 const char *fmt;
344dc2fa 3411
3412 code = GET_CODE (x);
a030d4a8 3413 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3414 LABEL_NUSES (XEXP (x, 0))++;
3415
3416 fmt = GET_RTX_FORMAT (code);
3417 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3418 {
3419 if (fmt[i] == 'e')
ff385626 3420 mark_label_nuses (XEXP (x, i));
344dc2fa 3421 else if (fmt[i] == 'E')
ff385626 3422 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3423 mark_label_nuses (XVECEXP (x, i, j));
3424 }
3425}
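/* The 'e'/'E' loop above is the standard way to walk an rtx by its format
   string: 'e' operands are sub-rtxes, 'E' operands are vectors of
   sub-rtxes, and all other format codes (integers, strings, ...) carry no
   rtx and are skipped. A hand-rolled walker has the same shape (sketch,
   with "walk" standing for the recursive call):

     fmt = GET_RTX_FORMAT (GET_CODE (x));
     for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
       if (fmt[i] == 'e')
         walk (XEXP (x, i));
       else if (fmt[i] == 'E')
         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
           walk (XVECEXP (x, i, j)); */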
3426
15bbde2b 3427\f
3428/* Try splitting insns that can be split for better scheduling.
3429 PAT is the pattern which might split.
3430 TRIAL is the insn providing PAT.
6ef828f9 3431 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3432
3433 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3434 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3435 returns TRIAL. If the insn to be returned can be split, it will be. */
3436
3437rtx
35cb5232 3438try_split (rtx pat, rtx trial, int last)
15bbde2b 3439{
3440 rtx before = PREV_INSN (trial);
3441 rtx after = NEXT_INSN (trial);
15bbde2b 3442 int has_barrier = 0;
1e5b92fa 3443 rtx note, seq, tem;
3cd757b1 3444 int probability;
e13693ec 3445 rtx insn_last, insn;
3446 int njumps = 0;
3cd757b1 3447
25e880b1 3448 /* We're not good at redistributing frame information. */
3449 if (RTX_FRAME_RELATED_P (trial))
3450 return trial;
3451
3cd757b1 3452 if (any_condjump_p (trial)
3453 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3454 split_branch_probability = INTVAL (XEXP (note, 0));
3455 probability = split_branch_probability;
3456
3457 seq = split_insns (pat, trial);
3458
3459 split_branch_probability = -1;
15bbde2b 3460
3461 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3462 We may need to handle this specially. */
6d7dc5b9 3463 if (after && BARRIER_P (after))
15bbde2b 3464 {
3465 has_barrier = 1;
3466 after = NEXT_INSN (after);
3467 }
3468
e13693ec 3469 if (!seq)
3470 return trial;
3471
 3472 /* Avoid an infinite loop if any insn of the result matches
3473 the original pattern. */
3474 insn_last = seq;
3475 while (1)
15bbde2b 3476 {
e13693ec 3477 if (INSN_P (insn_last)
3478 && rtx_equal_p (PATTERN (insn_last), pat))
3479 return trial;
3480 if (!NEXT_INSN (insn_last))
3481 break;
3482 insn_last = NEXT_INSN (insn_last);
3483 }
d823ba47 3484
3072d30e 3485 /* We will be adding the new sequence to the function. The splitters
3486 may have introduced invalid RTL sharing, so unshare the sequence now. */
3487 unshare_all_rtl_in_chain (seq);
3488
e13693ec 3489 /* Mark labels. */
3490 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3491 {
6d7dc5b9 3492 if (JUMP_P (insn))
e13693ec 3493 {
3494 mark_jump_label (PATTERN (insn), insn, 0);
3495 njumps++;
3496 if (probability != -1
3497 && any_condjump_p (insn)
3498 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3499 {
e13693ec 3500 /* We can preserve the REG_BR_PROB notes only if exactly
3501 one jump is created, otherwise the machine description
3502 is responsible for this step using
3503 the split_branch_probability variable. */
611234b4 3504 gcc_assert (njumps == 1);
a1ddb869 3505 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
31d3e01c 3506 }
e13693ec 3507 }
3508 }
3509
3510 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3511 in SEQ and copy any additional information across. */
6d7dc5b9 3512 if (CALL_P (trial))
e13693ec 3513 {
3514 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3515 if (CALL_P (insn))
e13693ec 3516 {
b0bd0491 3517 rtx next, *p;
3518
3519 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3520 target may have explicitly specified. */
3521 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3522 while (*p)
3523 p = &XEXP (*p, 1);
3524 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3525
3526 /* If the old call was a sibling call, the new one must
3527 be too. */
e13693ec 3528 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3529
3530 /* If the new call is the last instruction in the sequence,
3531 it will effectively replace the old call in-situ. Otherwise
3532 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3533 so that it comes immediately after the new call. */
3534 if (NEXT_INSN (insn))
47e1410d 3535 for (next = NEXT_INSN (trial);
3536 next && NOTE_P (next);
3537 next = NEXT_INSN (next))
3538 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3539 {
3540 remove_insn (next);
3541 add_insn_after (next, insn, NULL);
47e1410d 3542 break;
b0bd0491 3543 }
e13693ec 3544 }
3545 }
5262c253 3546
e13693ec 3547 /* Copy notes, particularly those related to the CFG. */
3548 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3549 {
3550 switch (REG_NOTE_KIND (note))
3551 {
3552 case REG_EH_REGION:
e38def9c 3553 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3554 break;
381eb1e7 3555
e13693ec 3556 case REG_NORETURN:
3557 case REG_SETJMP:
698ff1f0 3558 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3559 {
6d7dc5b9 3560 if (CALL_P (insn))
a1ddb869 3561 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3562 }
e13693ec 3563 break;
5bb27a4b 3564
e13693ec 3565 case REG_NON_LOCAL_GOTO:
698ff1f0 3566 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3567 {
6d7dc5b9 3568 if (JUMP_P (insn))
a1ddb869 3569 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3570 }
e13693ec 3571 break;
344dc2fa 3572
698ff1f0 3573#ifdef AUTO_INC_DEC
3574 case REG_INC:
3575 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3576 {
3577 rtx reg = XEXP (note, 0);
3578 if (!FIND_REG_INC_NOTE (insn, reg)
3579 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3580 add_reg_note (insn, REG_INC, reg);
698ff1f0 3581 }
3582 break;
3583#endif
3584
e13693ec 3585 default:
3586 break;
15bbde2b 3587 }
e13693ec 3588 }
3589
3590 /* If there are LABELS inside the split insns, increment the
3591 usage count so we don't delete the labels. */
19d2fe05 3592 if (INSN_P (trial))
e13693ec 3593 {
3594 insn = insn_last;
3595 while (insn != NULL_RTX)
15bbde2b 3596 {
19d2fe05 3597 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3598 if (NONJUMP_INSN_P (insn))
e13693ec 3599 mark_label_nuses (PATTERN (insn));
15bbde2b 3600
e13693ec 3601 insn = PREV_INSN (insn);
3602 }
15bbde2b 3603 }
3604
13751393 3605 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
e13693ec 3606
3607 delete_insn (trial);
3608 if (has_barrier)
3609 emit_barrier_after (tem);
3610
3611 /* Recursively call try_split for each new insn created; by the
3612 time control returns here that insn will be fully split, so
3613 set LAST and continue from the insn after the one returned.
3614 We can't use next_active_insn here since AFTER may be a note.
3615 Ignore deleted insns, which can occur if not optimizing. */
3616 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3617 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3618 tem = try_split (PATTERN (tem), tem, 1);
3619
3620 /* Return either the first or the last insn, depending on which was
3621 requested. */
3622 return last
06f9d6ef 3623 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3624 : NEXT_INSN (before);
15bbde2b 3625}
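
/* A minimal usage sketch (INSN here is a hypothetical insn already in
   the chain, not a name from this file):

     rtx last = try_split (PATTERN (insn), insn, 1);

   If the insn was split, LAST is the final insn of the replacement
   sequence and INSN has been deleted; otherwise LAST == INSN.  */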
3626\f
3627/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3628 Store PATTERN in the pattern slots. */
15bbde2b 3629
3630rtx
35cb5232 3631make_insn_raw (rtx pattern)
15bbde2b 3632{
19cb6b50 3633 rtx insn;
15bbde2b 3634
d7c47c0e 3635 insn = rtx_alloc (INSN);
15bbde2b 3636
575333f9 3637 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3638 PATTERN (insn) = pattern;
3639 INSN_CODE (insn) = -1;
fc92fa61 3640 REG_NOTES (insn) = NULL;
375c1c8a 3641 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3642 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3643
fe7f701d 3644#ifdef ENABLE_RTL_CHECKING
3645 if (insn
9204e736 3646 && INSN_P (insn)
fe7f701d 3647 && (returnjump_p (insn)
3648 || (GET_CODE (insn) == SET
3649 && SET_DEST (insn) == pc_rtx)))
3650 {
c3ceba8e 3651 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3652 debug_rtx (insn);
3653 }
3654#endif
d823ba47 3655
15bbde2b 3656 return insn;
3657}
3658
9845d120 3659/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3660
3661rtx
3662make_debug_insn_raw (rtx pattern)
3663{
3664 rtx insn;
3665
3666 insn = rtx_alloc (DEBUG_INSN);
3667 INSN_UID (insn) = cur_debug_insn_uid++;
3668 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3669 INSN_UID (insn) = cur_insn_uid++;
3670
3671 PATTERN (insn) = pattern;
3672 INSN_CODE (insn) = -1;
3673 REG_NOTES (insn) = NULL;
3674 INSN_LOCATOR (insn) = curr_insn_locator ();
3675 BLOCK_FOR_INSN (insn) = NULL;
3676
3677 return insn;
3678}
3679
31d3e01c 3680/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3681
89140b26 3682rtx
35cb5232 3683make_jump_insn_raw (rtx pattern)
15bbde2b 3684{
19cb6b50 3685 rtx insn;
15bbde2b 3686
6a84e367 3687 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3688 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3689
3690 PATTERN (insn) = pattern;
3691 INSN_CODE (insn) = -1;
fc92fa61 3692 REG_NOTES (insn) = NULL;
3693 JUMP_LABEL (insn) = NULL;
375c1c8a 3694 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3695 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3696
3697 return insn;
3698}
6e911104 3699
31d3e01c 3700/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3701
3702static rtx
35cb5232 3703make_call_insn_raw (rtx pattern)
6e911104 3704{
19cb6b50 3705 rtx insn;
6e911104 3706
3707 insn = rtx_alloc (CALL_INSN);
3708 INSN_UID (insn) = cur_insn_uid++;
3709
3710 PATTERN (insn) = pattern;
3711 INSN_CODE (insn) = -1;
6e911104 3712 REG_NOTES (insn) = NULL;
3713 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
375c1c8a 3714 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3715 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3716
3717 return insn;
3718}
15bbde2b 3719\f
3720/* Add INSN to the end of the doubly-linked list.
3721 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3722
3723void
35cb5232 3724add_insn (rtx insn)
15bbde2b 3725{
06f9d6ef 3726 PREV_INSN (insn) = get_last_insn();
15bbde2b 3727 NEXT_INSN (insn) = 0;
3728
06f9d6ef 3729 if (NULL != get_last_insn())
3730 NEXT_INSN (get_last_insn ()) = insn;
15bbde2b 3731
06f9d6ef 3732 if (NULL == get_insns ())
3733 set_first_insn (insn);
15bbde2b 3734
06f9d6ef 3735 set_last_insn (insn);
15bbde2b 3736}
3737
312de84d 3738/* Add INSN into the doubly-linked list after insn AFTER. This and
3739 the next should be the only functions called to insert an insn once
f65c10c0 3740 delay slots have been filled, since only they know how to update a
312de84d 3741 SEQUENCE. */
15bbde2b 3742
3743void
3072d30e 3744add_insn_after (rtx insn, rtx after, basic_block bb)
15bbde2b 3745{
3746 rtx next = NEXT_INSN (after);
3747
611234b4 3748 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3749
15bbde2b 3750 NEXT_INSN (insn) = next;
3751 PREV_INSN (insn) = after;
3752
3753 if (next)
3754 {
3755 PREV_INSN (next) = insn;
6d7dc5b9 3756 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
15bbde2b 3757 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3758 }
06f9d6ef 3759 else if (get_last_insn () == after)
3760 set_last_insn (insn);
15bbde2b 3761 else
3762 {
0a893c29 3763 struct sequence_stack *stack = seq_stack;
15bbde2b 3764 /* Scan all pending sequences too. */
3765 for (; stack; stack = stack->next)
3766 if (after == stack->last)
398f4855 3767 {
3768 stack->last = insn;
3769 break;
3770 }
312de84d 3771
611234b4 3772 gcc_assert (stack);
15bbde2b 3773 }
3774
6d7dc5b9 3775 if (!BARRIER_P (after)
3776 && !BARRIER_P (insn)
9dda7915 3777 && (bb = BLOCK_FOR_INSN (after)))
3778 {
3779 set_block_for_insn (insn, bb);
308f9b79 3780 if (INSN_P (insn))
3072d30e 3781 df_insn_rescan (insn);
9dda7915 3782 /* This should not happen, as the first insn in the BB is
3fb1e43b 3783 always either a NOTE or a LABEL. */
5496dbfc 3784 if (BB_END (bb) == after
9dda7915 3785 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3786 && !BARRIER_P (insn)
ad4583d9 3787 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3788 BB_END (bb) = insn;
9dda7915 3789 }
3790
15bbde2b 3791 NEXT_INSN (after) = insn;
6d7dc5b9 3792 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
15bbde2b 3793 {
3794 rtx sequence = PATTERN (after);
3795 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3796 }
3797}
3798
312de84d 3799/* Add INSN into the doubly-linked list before insn BEFORE. This and
3072d30e 3800 the previous should be the only functions called to insert an insn
3801 once delay slots have been filled, since only they know how to
3802 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3803 basic block from BEFORE. */
312de84d 3804
3805void
3072d30e 3806add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 3807{
3808 rtx prev = PREV_INSN (before);
3809
611234b4 3810 gcc_assert (!optimize || !INSN_DELETED_P (before));
f65c10c0 3811
312de84d 3812 PREV_INSN (insn) = prev;
3813 NEXT_INSN (insn) = before;
3814
3815 if (prev)
3816 {
3817 NEXT_INSN (prev) = insn;
6d7dc5b9 3818 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
312de84d 3819 {
3820 rtx sequence = PATTERN (prev);
3821 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3822 }
3823 }
06f9d6ef 3824 else if (get_insns () == before)
3825 set_first_insn (insn);
312de84d 3826 else
3827 {
0a893c29 3828 struct sequence_stack *stack = seq_stack;
312de84d 3829 /* Scan all pending sequences too. */
3830 for (; stack; stack = stack->next)
3831 if (before == stack->first)
398f4855 3832 {
3833 stack->first = insn;
3834 break;
3835 }
312de84d 3836
611234b4 3837 gcc_assert (stack);
312de84d 3838 }
3839
48e1416a 3840 if (!bb
3072d30e 3841 && !BARRIER_P (before)
3842 && !BARRIER_P (insn))
3843 bb = BLOCK_FOR_INSN (before);
3844
3845 if (bb)
9dda7915 3846 {
3847 set_block_for_insn (insn, bb);
308f9b79 3848 if (INSN_P (insn))
3072d30e 3849 df_insn_rescan (insn);
611234b4 3850 /* This should not happen, as the first insn in the BB is always
ba821eb1 3851 either a NOTE or a LABEL. */
611234b4 3852 gcc_assert (BB_HEAD (bb) != insn
3853 /* Avoid clobbering of structure when creating new BB. */
3854 || BARRIER_P (insn)
ad4583d9 3855 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 3856 }
3857
312de84d 3858 PREV_INSN (before) = insn;
6d7dc5b9 3859 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
312de84d 3860 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3861}
3862
3072d30e 3863
3864/* Replace INSN with a NOTE_INSN_DELETED note. */
3865
fc3d1695 3866void
3867set_insn_deleted (rtx insn)
3072d30e 3868{
3869 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3870 PUT_CODE (insn, NOTE);
3871 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3872}
3873
3874
7ddcf2bf 3875/* Remove an insn from its doubly-linked list. This function knows how
3876 to handle sequences. */
3877void
35cb5232 3878remove_insn (rtx insn)
7ddcf2bf 3879{
3880 rtx next = NEXT_INSN (insn);
3881 rtx prev = PREV_INSN (insn);
e4bf866d 3882 basic_block bb;
3883
3072d30e 3884 /* Later in the code, the block will be marked dirty. */
3885 df_insn_delete (NULL, INSN_UID (insn));
3886
7ddcf2bf 3887 if (prev)
3888 {
3889 NEXT_INSN (prev) = next;
6d7dc5b9 3890 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 3891 {
3892 rtx sequence = PATTERN (prev);
3893 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3894 }
3895 }
06f9d6ef 3896 else if (get_insns () == insn)
3897 {
c8f0c143 3898 if (next)
3899 PREV_INSN (next) = NULL;
06f9d6ef 3900 set_first_insn (next);
3901 }
7ddcf2bf 3902 else
3903 {
0a893c29 3904 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3905 /* Scan all pending sequences too. */
3906 for (; stack; stack = stack->next)
3907 if (insn == stack->first)
3908 {
3909 stack->first = next;
3910 break;
3911 }
3912
611234b4 3913 gcc_assert (stack);
7ddcf2bf 3914 }
3915
3916 if (next)
3917 {
3918 PREV_INSN (next) = prev;
6d7dc5b9 3919 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 3920 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3921 }
06f9d6ef 3922 else if (get_last_insn () == insn)
3923 set_last_insn (prev);
7ddcf2bf 3924 else
3925 {
0a893c29 3926 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3927 /* Scan all pending sequences too. */
3928 for (; stack; stack = stack->next)
3929 if (insn == stack->last)
3930 {
3931 stack->last = prev;
3932 break;
3933 }
3934
611234b4 3935 gcc_assert (stack);
7ddcf2bf 3936 }
6d7dc5b9 3937 if (!BARRIER_P (insn)
e4bf866d 3938 && (bb = BLOCK_FOR_INSN (insn)))
3939 {
137b701d 3940 if (NONDEBUG_INSN_P (insn))
3072d30e 3941 df_set_bb_dirty (bb);
5496dbfc 3942 if (BB_HEAD (bb) == insn)
e4bf866d 3943 {
f4aee538 3944 /* Never ever delete the basic block note without deleting whole
3945 basic block. */
611234b4 3946 gcc_assert (!NOTE_P (insn));
5496dbfc 3947 BB_HEAD (bb) = next;
e4bf866d 3948 }
5496dbfc 3949 if (BB_END (bb) == insn)
3950 BB_END (bb) = prev;
e4bf866d 3951 }
7ddcf2bf 3952}
3953
d5f9786f 3954/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3955
3956void
35cb5232 3957add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 3958{
611234b4 3959 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 3960
3961 /* Put the register usage information on the CALL. If there is already
3962 some usage information, put ours at the end. */
3963 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3964 {
3965 rtx link;
3966
3967 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3968 link = XEXP (link, 1))
3969 ;
3970
3971 XEXP (link, 1) = call_fusage;
3972 }
3973 else
3974 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3975}
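
/* For example, to record that a call uses REG (a sketch; CALL_INSN and
   REG are hypothetical, and use_reg is the expr.c helper that chains a
   USE onto the fusage list):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, reg);
     add_function_usage_to (call_insn, call_fusage);  */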
3976
15bbde2b 3977/* Delete all insns made since FROM.
3978 FROM becomes the new last instruction. */
3979
3980void
35cb5232 3981delete_insns_since (rtx from)
15bbde2b 3982{
3983 if (from == 0)
06f9d6ef 3984 set_first_insn (0);
15bbde2b 3985 else
3986 NEXT_INSN (from) = 0;
06f9d6ef 3987 set_last_insn (from);
15bbde2b 3988}
3989
34e2ddcd 3990/* This function is deprecated; please use sequences instead.
3991
3992 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 3993 The insns to be moved are those between FROM and TO.
3994 They are moved to a new position after the insn AFTER.
3995 AFTER must not be FROM or TO or any insn in between.
3996
3997 This function does not know about SEQUENCEs and hence should not be
3998 called after delay-slot filling has been done. */
3999
4000void
35cb5232 4001reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 4002{
7f6ca11f 4003#ifdef ENABLE_CHECKING
4004 rtx x;
4005 for (x = from; x != to; x = NEXT_INSN (x))
4006 gcc_assert (after != x);
4007 gcc_assert (after != to);
4008#endif
4009
15bbde2b 4010 /* Splice this bunch out of where it is now. */
4011 if (PREV_INSN (from))
4012 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4013 if (NEXT_INSN (to))
4014 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4015 if (get_last_insn () == to)
4016 set_last_insn (PREV_INSN (from));
4017 if (get_insns () == from)
4018 set_first_insn (NEXT_INSN (to));
15bbde2b 4019
4020 /* Make the new neighbors point to it and it to them. */
4021 if (NEXT_INSN (after))
4022 PREV_INSN (NEXT_INSN (after)) = to;
4023
4024 NEXT_INSN (to) = NEXT_INSN (after);
4025 PREV_INSN (from) = after;
4026 NEXT_INSN (after) = from;
06f9d6ef 4027 if (after == get_last_insn())
4028 set_last_insn (to);
15bbde2b 4029}
4030
9dda7915 4031/* Same as function above, but take care to update BB boundaries. */
4032void
35cb5232 4033reorder_insns (rtx from, rtx to, rtx after)
9dda7915 4034{
4035 rtx prev = PREV_INSN (from);
4036 basic_block bb, bb2;
4037
4038 reorder_insns_nobb (from, to, after);
4039
6d7dc5b9 4040 if (!BARRIER_P (after)
9dda7915 4041 && (bb = BLOCK_FOR_INSN (after)))
4042 {
4043 rtx x;
3072d30e 4044 df_set_bb_dirty (bb);
d4c5e26d 4045
6d7dc5b9 4046 if (!BARRIER_P (from)
9dda7915 4047 && (bb2 = BLOCK_FOR_INSN (from)))
4048 {
5496dbfc 4049 if (BB_END (bb2) == to)
4050 BB_END (bb2) = prev;
3072d30e 4051 df_set_bb_dirty (bb2);
9dda7915 4052 }
4053
5496dbfc 4054 if (BB_END (bb) == after)
4055 BB_END (bb) = to;
9dda7915 4056
4057 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4058 if (!BARRIER_P (x))
a2bdd643 4059 df_insn_change_bb (x, bb);
9dda7915 4060 }
4061}
4062
15bbde2b 4063\f
31d3e01c 4064/* Emit insn(s) of given code and pattern
4065 at a specified place within the doubly-linked list.
15bbde2b 4066
31d3e01c 4067 All of the emit_foo global entry points accept an object
4068 X which is either an insn list or a PATTERN of a single
4069 instruction.
15bbde2b 4070
31d3e01c 4071 There are thus a few canonical ways to generate code and
4072 emit it at a specific place in the instruction stream. For
4073 example, consider the instruction named SPOT and the fact that
4074 we would like to emit some instructions before SPOT. We might
4075 do it like this:
15bbde2b 4076
31d3e01c 4077 start_sequence ();
4078 ... emit the new instructions ...
4079 insns_head = get_insns ();
4080 end_sequence ();
15bbde2b 4081
31d3e01c 4082 emit_insn_before (insns_head, SPOT);
15bbde2b 4083
31d3e01c 4084 It used to be common to generate SEQUENCE rtl instead, but that
4085 is a relic of the past which no longer occurs. The reason is that
4086 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4087 generated would almost certainly die right after it was created. */
15bbde2b 4088
5f7c5ddd 4089static rtx
4090emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4091 rtx (*make_raw) (rtx))
15bbde2b 4092{
19cb6b50 4093 rtx insn;
15bbde2b 4094
611234b4 4095 gcc_assert (before);
31d3e01c 4096
4097 if (x == NULL_RTX)
4098 return last;
4099
4100 switch (GET_CODE (x))
15bbde2b 4101 {
9845d120 4102 case DEBUG_INSN:
31d3e01c 4103 case INSN:
4104 case JUMP_INSN:
4105 case CALL_INSN:
4106 case CODE_LABEL:
4107 case BARRIER:
4108 case NOTE:
4109 insn = x;
4110 while (insn)
4111 {
4112 rtx next = NEXT_INSN (insn);
3072d30e 4113 add_insn_before (insn, before, bb);
31d3e01c 4114 last = insn;
4115 insn = next;
4116 }
4117 break;
4118
4119#ifdef ENABLE_RTL_CHECKING
4120 case SEQUENCE:
611234b4 4121 gcc_unreachable ();
31d3e01c 4122 break;
4123#endif
4124
4125 default:
5f7c5ddd 4126 last = (*make_raw) (x);
3072d30e 4127 add_insn_before (last, before, bb);
31d3e01c 4128 break;
15bbde2b 4129 }
4130
31d3e01c 4131 return last;
15bbde2b 4132}
4133
5f7c5ddd 4134/* Make X be output before the instruction BEFORE. */
4135
4136rtx
4137emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4138{
4139 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4140}
4141
31d3e01c 4142/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4143 and output it before the instruction BEFORE. */
4144
4145rtx
0891f67c 4146emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4147{
5f7c5ddd 4148 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4149 make_jump_insn_raw);
15bbde2b 4150}
4151
31d3e01c 4152/* Make an instruction with body X and code CALL_INSN
cd0fe062 4153 and output it before the instruction BEFORE. */
4154
4155rtx
0891f67c 4156emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4157{
5f7c5ddd 4158 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4159 make_call_insn_raw);
cd0fe062 4160}
4161
9845d120 4162/* Make an instruction with body X and code DEBUG_INSN
4163 and output it before the instruction BEFORE. */
4164
4165rtx
4166emit_debug_insn_before_noloc (rtx x, rtx before)
4167{
5f7c5ddd 4168 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4169 make_debug_insn_raw);
9845d120 4170}
4171
15bbde2b 4172/* Make an insn of code BARRIER
71caadc0 4173 and output it before the insn BEFORE. */
15bbde2b 4174
4175rtx
35cb5232 4176emit_barrier_before (rtx before)
15bbde2b 4177{
19cb6b50 4178 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4179
4180 INSN_UID (insn) = cur_insn_uid++;
4181
3072d30e 4182 add_insn_before (insn, before, NULL);
15bbde2b 4183 return insn;
4184}
4185
71caadc0 4186/* Emit the label LABEL before the insn BEFORE. */
4187
4188rtx
35cb5232 4189emit_label_before (rtx label, rtx before)
71caadc0 4190{
4191 /* This can be called twice for the same label as a result of the
4192 confusion that follows a syntax error! So make it harmless. */
4193 if (INSN_UID (label) == 0)
4194 {
4195 INSN_UID (label) = cur_insn_uid++;
3072d30e 4196 add_insn_before (label, before, NULL);
71caadc0 4197 }
4198
4199 return label;
4200}
4201
15bbde2b 4202/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4203
4204rtx
ad4583d9 4205emit_note_before (enum insn_note subtype, rtx before)
15bbde2b 4206{
19cb6b50 4207 rtx note = rtx_alloc (NOTE);
15bbde2b 4208 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4209 NOTE_KIND (note) = subtype;
ab87d1bc 4210 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4211 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
15bbde2b 4212
3072d30e 4213 add_insn_before (note, before, NULL);
15bbde2b 4214 return note;
4215}
4216\f
31d3e01c 4217/* Helper for emit_insn_after, handles lists of instructions
4218 efficiently. */
15bbde2b 4219
31d3e01c 4220static rtx
3072d30e 4221emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4222{
31d3e01c 4223 rtx last;
4224 rtx after_after;
3072d30e 4225 if (!bb && !BARRIER_P (after))
4226 bb = BLOCK_FOR_INSN (after);
15bbde2b 4227
3072d30e 4228 if (bb)
15bbde2b 4229 {
3072d30e 4230 df_set_bb_dirty (bb);
31d3e01c 4231 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4232 if (!BARRIER_P (last))
3072d30e 4233 {
4234 set_block_for_insn (last, bb);
4235 df_insn_rescan (last);
4236 }
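      /* The loop above handles every insn but the last; give the last
	 one the same treatment. */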
6d7dc5b9 4237 if (!BARRIER_P (last))
3072d30e 4238 {
4239 set_block_for_insn (last, bb);
4240 df_insn_rescan (last);
4241 }
5496dbfc 4242 if (BB_END (bb) == after)
4243 BB_END (bb) = last;
15bbde2b 4244 }
4245 else
31d3e01c 4246 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4247 continue;
4248
4249 after_after = NEXT_INSN (after);
4250
4251 NEXT_INSN (after) = first;
4252 PREV_INSN (first) = after;
4253 NEXT_INSN (last) = after_after;
4254 if (after_after)
4255 PREV_INSN (after_after) = last;
4256
06f9d6ef 4257 if (after == get_last_insn())
4258 set_last_insn (last);
e1ab7874 4259
31d3e01c 4260 return last;
4261}
4262
5f7c5ddd 4263static rtx
4264emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4265 rtx (*make_raw)(rtx))
31d3e01c 4266{
4267 rtx last = after;
4268
611234b4 4269 gcc_assert (after);
31d3e01c 4270
4271 if (x == NULL_RTX)
4272 return last;
4273
4274 switch (GET_CODE (x))
15bbde2b 4275 {
9845d120 4276 case DEBUG_INSN:
31d3e01c 4277 case INSN:
4278 case JUMP_INSN:
4279 case CALL_INSN:
4280 case CODE_LABEL:
4281 case BARRIER:
4282 case NOTE:
3072d30e 4283 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4284 break;
4285
4286#ifdef ENABLE_RTL_CHECKING
4287 case SEQUENCE:
611234b4 4288 gcc_unreachable ();
31d3e01c 4289 break;
4290#endif
4291
4292 default:
5f7c5ddd 4293 last = (*make_raw) (x);
3072d30e 4294 add_insn_after (last, after, bb);
31d3e01c 4295 break;
15bbde2b 4296 }
4297
31d3e01c 4298 return last;
15bbde2b 4299}
4300
5f7c5ddd 4301/* Make X be output after the insn AFTER, and set its basic block. If
4302 BB is NULL, an attempt is made to infer the BB from AFTER. */
4303
4304rtx
4305emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4306{
4307 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4308}
4309
1bea98fb 4310
31d3e01c 4311/* Make an insn of code JUMP_INSN with body X
15bbde2b 4312 and output it after the insn AFTER. */
4313
4314rtx
0891f67c 4315emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4316{
5f7c5ddd 4317 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
31d3e01c 4318}
4319
4320/* Make an instruction with body X and code CALL_INSN
4321 and output it after the instruction AFTER. */
4322
4323rtx
0891f67c 4324emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4325{
5f7c5ddd 4326 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4327}
4328
9845d120 4329/* Make an instruction with body X and code DEBUG_INSN
4330 and output it after the instruction AFTER. */
4331
4332rtx
4333emit_debug_insn_after_noloc (rtx x, rtx after)
4334{
5f7c5ddd 4335 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4336}
4337
15bbde2b 4338/* Make an insn of code BARRIER
4339 and output it after the insn AFTER. */
4340
4341rtx
35cb5232 4342emit_barrier_after (rtx after)
15bbde2b 4343{
19cb6b50 4344 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4345
4346 INSN_UID (insn) = cur_insn_uid++;
4347
3072d30e 4348 add_insn_after (insn, after, NULL);
15bbde2b 4349 return insn;
4350}
4351
4352/* Emit the label LABEL after the insn AFTER. */
4353
4354rtx
35cb5232 4355emit_label_after (rtx label, rtx after)
15bbde2b 4356{
4357 /* This can be called twice for the same label
4358 as a result of the confusion that follows a syntax error!
4359 So make it harmless. */
4360 if (INSN_UID (label) == 0)
4361 {
4362 INSN_UID (label) = cur_insn_uid++;
3072d30e 4363 add_insn_after (label, after, NULL);
15bbde2b 4364 }
4365
4366 return label;
4367}
4368
4369/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4370
4371rtx
ad4583d9 4372emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4373{
19cb6b50 4374 rtx note = rtx_alloc (NOTE);
15bbde2b 4375 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4376 NOTE_KIND (note) = subtype;
ab87d1bc 4377 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4378 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3072d30e 4379 add_insn_after (note, after, NULL);
15bbde2b 4380 return note;
4381}
15bbde2b 4382\f
ede4ebcb 4383/* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4384 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4385
4386static rtx
4387emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4388 rtx (*make_raw) (rtx))
d321a68b 4389{
ede4ebcb 4390 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4391
0891f67c 4392 if (pattern == NULL_RTX || !loc)
ca154f3f 4393 return last;
4394
31d3e01c 4395 after = NEXT_INSN (after);
4396 while (1)
4397 {
0891f67c 4398 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4399 INSN_LOCATOR (after) = loc;
31d3e01c 4400 if (after == last)
4401 break;
4402 after = NEXT_INSN (after);
4403 }
d321a68b 4404 return last;
4405}
4406
ede4ebcb 4407/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4408 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4409 any DEBUG_INSNs. */
4410
4411static rtx
4412emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4413 rtx (*make_raw) (rtx))
0891f67c 4414{
9845d120 4415 rtx prev = after;
4416
ede4ebcb 4417 if (skip_debug_insns)
4418 while (DEBUG_INSN_P (prev))
4419 prev = PREV_INSN (prev);
9845d120 4420
4421 if (INSN_P (prev))
ede4ebcb 4422 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4423 make_raw);
0891f67c 4424 else
ede4ebcb 4425 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4426}
4427
ede4ebcb 4428/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
d321a68b 4429rtx
ede4ebcb 4430emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4431{
ede4ebcb 4432 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4433}
31d3e01c 4434
ede4ebcb 4435/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4436rtx
4437emit_insn_after (rtx pattern, rtx after)
4438{
4439 return emit_pattern_after (pattern, after, true, make_insn_raw);
4440}
ca154f3f 4441
ede4ebcb 4442/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4443rtx
4444emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4445{
4446 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
d321a68b 4447}
4448
0891f67c 4449/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4450rtx
4451emit_jump_insn_after (rtx pattern, rtx after)
4452{
ede4ebcb 4453 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
0891f67c 4454}
4455
ede4ebcb 4456/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
d321a68b 4457rtx
35cb5232 4458emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4459{
ede4ebcb 4460 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4461}
4462
0891f67c 4463/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4464rtx
4465emit_call_insn_after (rtx pattern, rtx after)
4466{
ede4ebcb 4467 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4468}
4469
ede4ebcb 4470/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
9845d120 4471rtx
4472emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4473{
ede4ebcb 4474 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4475}
4476
4477/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4478rtx
4479emit_debug_insn_after (rtx pattern, rtx after)
4480{
ede4ebcb 4481 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4482}
4483
ede4ebcb 4484/* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4485 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4486 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4487 CALL_INSN, etc. */
4488
4489static rtx
4490emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4491 rtx (*make_raw) (rtx))
d321a68b 4492{
4493 rtx first = PREV_INSN (before);
ede4ebcb 4494 rtx last = emit_pattern_before_noloc (pattern, before,
4495 insnp ? before : NULL_RTX,
4496 NULL, make_raw);
0891f67c 4497
4498 if (pattern == NULL_RTX || !loc)
4499 return last;
4500
4486418e 4501 if (!first)
4502 first = get_insns ();
4503 else
4504 first = NEXT_INSN (first);
0891f67c 4505 while (1)
4506 {
4507 if (active_insn_p (first) && !INSN_LOCATOR (first))
4508 INSN_LOCATOR (first) = loc;
4509 if (first == last)
4510 break;
4511 first = NEXT_INSN (first);
4512 }
4513 return last;
4514}
4515
ede4ebcb 4516/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4517 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4518 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4519 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4520
4521static rtx
4522emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4523 bool insnp, rtx (*make_raw) (rtx))
0891f67c 4524{
9845d120 4525 rtx next = before;
4526
ede4ebcb 4527 if (skip_debug_insns)
4528 while (DEBUG_INSN_P (next))
4529 next = PREV_INSN (next);
9845d120 4530
4531 if (INSN_P (next))
ede4ebcb 4532 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4533 insnp, make_raw);
0891f67c 4534 else
ede4ebcb 4535 return emit_pattern_before_noloc (pattern, before,
4536 insnp ? before : NULL_RTX,
4537 NULL, make_raw);
0891f67c 4538}
4539
ede4ebcb 4540/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
0891f67c 4541rtx
ede4ebcb 4542emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0891f67c 4543{
ede4ebcb 4544 return emit_pattern_before_setloc (pattern, before, loc, true,
4545 make_insn_raw);
4546}
0891f67c 4547
ede4ebcb 4548/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4549rtx
4550emit_insn_before (rtx pattern, rtx before)
4551{
4552 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4553}
0891f67c 4554
ede4ebcb 4555/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4556rtx
4557emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4558{
4559 return emit_pattern_before_setloc (pattern, before, loc, false,
4560 make_jump_insn_raw);
0891f67c 4561}
4562
4563/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4564rtx
4565emit_jump_insn_before (rtx pattern, rtx before)
4566{
ede4ebcb 4567 return emit_pattern_before (pattern, before, true, false,
4568 make_jump_insn_raw);
0891f67c 4569}
4570
ede4ebcb 4571/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
0891f67c 4572rtx
4573emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4574{
ede4ebcb 4575 return emit_pattern_before_setloc (pattern, before, loc, false,
4576 make_call_insn_raw);
d321a68b 4577}
0891f67c 4578
ede4ebcb 4579/* Like emit_call_insn_before_noloc,
4580 but set INSN_LOCATOR according to BEFORE. */
0891f67c 4581rtx
4582emit_call_insn_before (rtx pattern, rtx before)
4583{
ede4ebcb 4584 return emit_pattern_before (pattern, before, true, false,
4585 make_call_insn_raw);
0891f67c 4586}
9845d120 4587
ede4ebcb 4588/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
9845d120 4589rtx
4590emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4591{
ede4ebcb 4592 return emit_pattern_before_setloc (pattern, before, loc, false,
4593 make_debug_insn_raw);
9845d120 4594}
4595
ede4ebcb 4596/* Like emit_debug_insn_before_noloc,
4597 but set INSN_LOCATOR according to BEFORE. */
9845d120 4598rtx
4599emit_debug_insn_before (rtx pattern, rtx before)
4600{
ede4ebcb 4601 return emit_pattern_before (pattern, before, false, false,
4602 make_debug_insn_raw);
9845d120 4603}
d321a68b 4604\f
31d3e01c 4605/* Take X and emit it at the end of the doubly-linked
4606 INSN list.
15bbde2b 4607
4608 Returns the last insn emitted. */
4609
4610rtx
35cb5232 4611emit_insn (rtx x)
15bbde2b 4612{
06f9d6ef 4613 rtx last = get_last_insn();
31d3e01c 4614 rtx insn;
15bbde2b 4615
31d3e01c 4616 if (x == NULL_RTX)
4617 return last;
15bbde2b 4618
31d3e01c 4619 switch (GET_CODE (x))
4620 {
9845d120 4621 case DEBUG_INSN:
31d3e01c 4622 case INSN:
4623 case JUMP_INSN:
4624 case CALL_INSN:
4625 case CODE_LABEL:
4626 case BARRIER:
4627 case NOTE:
4628 insn = x;
4629 while (insn)
15bbde2b 4630 {
31d3e01c 4631 rtx next = NEXT_INSN (insn);
15bbde2b 4632 add_insn (insn);
31d3e01c 4633 last = insn;
4634 insn = next;
15bbde2b 4635 }
31d3e01c 4636 break;
15bbde2b 4637
31d3e01c 4638#ifdef ENABLE_RTL_CHECKING
4639 case SEQUENCE:
611234b4 4640 gcc_unreachable ();
31d3e01c 4641 break;
4642#endif
15bbde2b 4643
31d3e01c 4644 default:
4645 last = make_insn_raw (x);
4646 add_insn (last);
4647 break;
15bbde2b 4648 }
4649
4650 return last;
4651}
4652
9845d120 4653/* Make an insn of code DEBUG_INSN with pattern X
4654 and add it to the end of the doubly-linked list. */
4655
4656rtx
4657emit_debug_insn (rtx x)
4658{
06f9d6ef 4659 rtx last = get_last_insn();
9845d120 4660 rtx insn;
4661
4662 if (x == NULL_RTX)
4663 return last;
4664
4665 switch (GET_CODE (x))
4666 {
4667 case DEBUG_INSN:
4668 case INSN:
4669 case JUMP_INSN:
4670 case CALL_INSN:
4671 case CODE_LABEL:
4672 case BARRIER:
4673 case NOTE:
4674 insn = x;
4675 while (insn)
4676 {
4677 rtx next = NEXT_INSN (insn);
4678 add_insn (insn);
4679 last = insn;
4680 insn = next;
4681 }
4682 break;
4683
4684#ifdef ENABLE_RTL_CHECKING
4685 case SEQUENCE:
4686 gcc_unreachable ();
4687 break;
4688#endif
4689
4690 default:
4691 last = make_debug_insn_raw (x);
4692 add_insn (last);
4693 break;
4694 }
4695
4696 return last;
4697}
4698
31d3e01c 4699/* Make an insn of code JUMP_INSN with pattern X
4700 and add it to the end of the doubly-linked list. */
15bbde2b 4701
4702rtx
35cb5232 4703emit_jump_insn (rtx x)
15bbde2b 4704{
d90b3d04 4705 rtx last = NULL_RTX, insn;
15bbde2b 4706
31d3e01c 4707 switch (GET_CODE (x))
15bbde2b 4708 {
9845d120 4709 case DEBUG_INSN:
31d3e01c 4710 case INSN:
4711 case JUMP_INSN:
4712 case CALL_INSN:
4713 case CODE_LABEL:
4714 case BARRIER:
4715 case NOTE:
4716 insn = x;
4717 while (insn)
4718 {
4719 rtx next = NEXT_INSN (insn);
4720 add_insn (insn);
4721 last = insn;
4722 insn = next;
4723 }
4724 break;
b36b07d8 4725
31d3e01c 4726#ifdef ENABLE_RTL_CHECKING
4727 case SEQUENCE:
611234b4 4728 gcc_unreachable ();
31d3e01c 4729 break;
4730#endif
b36b07d8 4731
31d3e01c 4732 default:
4733 last = make_jump_insn_raw (x);
4734 add_insn (last);
4735 break;
9dda7915 4736 }
b36b07d8 4737
4738 return last;
4739}
4740
31d3e01c 4741/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4742 and add it to the end of the doubly-linked list. */
4743
4744rtx
35cb5232 4745emit_call_insn (rtx x)
15bbde2b 4746{
31d3e01c 4747 rtx insn;
4748
4749 switch (GET_CODE (x))
15bbde2b 4750 {
9845d120 4751 case DEBUG_INSN:
31d3e01c 4752 case INSN:
4753 case JUMP_INSN:
4754 case CALL_INSN:
4755 case CODE_LABEL:
4756 case BARRIER:
4757 case NOTE:
4758 insn = emit_insn (x);
4759 break;
15bbde2b 4760
31d3e01c 4761#ifdef ENABLE_RTL_CHECKING
4762 case SEQUENCE:
611234b4 4763 gcc_unreachable ();
31d3e01c 4764 break;
4765#endif
15bbde2b 4766
31d3e01c 4767 default:
4768 insn = make_call_insn_raw (x);
15bbde2b 4769 add_insn (insn);
31d3e01c 4770 break;
15bbde2b 4771 }
31d3e01c 4772
4773 return insn;
15bbde2b 4774}
4775
4776/* Add the label LABEL to the end of the doubly-linked list. */
4777
4778rtx
35cb5232 4779emit_label (rtx label)
15bbde2b 4780{
4781 /* This can be called twice for the same label
4782 as a result of the confusion that follows a syntax error!
4783 So make it harmless. */
4784 if (INSN_UID (label) == 0)
4785 {
4786 INSN_UID (label) = cur_insn_uid++;
4787 add_insn (label);
4788 }
4789 return label;
4790}
4791
4792/* Make an insn of code BARRIER
4793 and add it to the end of the doubly-linked list. */
4794
4795rtx
35cb5232 4796emit_barrier (void)
15bbde2b 4797{
19cb6b50 4798 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 4799 INSN_UID (barrier) = cur_insn_uid++;
4800 add_insn (barrier);
4801 return barrier;
4802}
4803
2f57e3d9 4804/* Emit a copy of note ORIG. */
35cb5232 4805
2f57e3d9 4806rtx
4807emit_note_copy (rtx orig)
4808{
4809 rtx note;
48e1416a 4810
2f57e3d9 4811 note = rtx_alloc (NOTE);
48e1416a 4812
2f57e3d9 4813 INSN_UID (note) = cur_insn_uid++;
4814 NOTE_DATA (note) = NOTE_DATA (orig);
ad4583d9 4815 NOTE_KIND (note) = NOTE_KIND (orig);
2f57e3d9 4816 BLOCK_FOR_INSN (note) = NULL;
4817 add_insn (note);
48e1416a 4818
31b97e8f 4819 return note;
15bbde2b 4820}
4821
31b97e8f 4822/* Make an insn of code NOTE or type NOTE_NO
4823 and add it to the end of the doubly-linked list. */
15bbde2b 4824
4825rtx
ad4583d9 4826emit_note (enum insn_note kind)
15bbde2b 4827{
19cb6b50 4828 rtx note;
15bbde2b 4829
15bbde2b 4830 note = rtx_alloc (NOTE);
4831 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4832 NOTE_KIND (note) = kind;
6c7786cb 4833 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ab87d1bc 4834 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4835 add_insn (note);
4836 return note;
4837}
4838
18b42941 4839/* Emit a clobber of lvalue X. */
4840
4841rtx
4842emit_clobber (rtx x)
4843{
4844 /* CONCATs should not appear in the insn stream. */
4845 if (GET_CODE (x) == CONCAT)
4846 {
4847 emit_clobber (XEXP (x, 0));
4848 return emit_clobber (XEXP (x, 1));
4849 }
4850 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4851}
4852
4853/* Return a sequence of insns to clobber lvalue X. */
4854
4855rtx
4856gen_clobber (rtx x)
4857{
4858 rtx seq;
4859
4860 start_sequence ();
4861 emit_clobber (x);
4862 seq = get_insns ();
4863 end_sequence ();
4864 return seq;
4865}
4866
4867/* Emit a use of rvalue X. */
4868
4869rtx
4870emit_use (rtx x)
4871{
4872 /* CONCATs should not appear in the insn stream. */
4873 if (GET_CODE (x) == CONCAT)
4874 {
4875 emit_use (XEXP (x, 0));
4876 return emit_use (XEXP (x, 1));
4877 }
4878 return emit_insn (gen_rtx_USE (VOIDmode, x));
4879}
4880
4881/* Return a sequence of insns to use rvalue X. */
4882
4883rtx
4884gen_use (rtx x)
4885{
4886 rtx seq;
4887
4888 start_sequence ();
4889 emit_use (x);
4890 seq = get_insns ();
4891 end_sequence ();
4892 return seq;
4893}
4894
15bbde2b 4895/* Cause next statement to emit a line note even if the line number
bccd9980 4896 has not changed. */
15bbde2b 4897
4898void
35cb5232 4899force_next_line_note (void)
15bbde2b 4900{
7bd3dcc4 4901 last_location = -1;
15bbde2b 4902}
f1934a33 4903
4904/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 4905 note of this type already exists, remove it first. */
f1934a33 4906
c080d8f0 4907rtx
35cb5232 4908set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 4909{
4910 rtx note = find_reg_note (insn, kind, NULL_RTX);
4911
7e6224ab 4912 switch (kind)
4913 {
4914 case REG_EQUAL:
4915 case REG_EQUIV:
4916 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4917 has multiple sets (some callers assume single_set
4918 means the insn only has one set, when in fact it
4919 means the insn only has one * useful * set). */
4920 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4921 {
611234b4 4922 gcc_assert (!note);
7e6224ab 4923 return NULL_RTX;
4924 }
4925
4926 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4927 It serves no useful purpose and breaks eliminate_regs. */
4928 if (GET_CODE (datum) == ASM_OPERANDS)
4929 return NULL_RTX;
3072d30e 4930
4931 if (note)
4932 {
4933 XEXP (note, 0) = datum;
4934 df_notes_rescan (insn);
4935 return note;
4936 }
7e6224ab 4937 break;
4938
4939 default:
3072d30e 4940 if (note)
4941 {
4942 XEXP (note, 0) = datum;
4943 return note;
4944 }
7e6224ab 4945 break;
4946 }
c080d8f0 4947
a1ddb869 4948 add_reg_note (insn, kind, datum);
3072d30e 4949
4950 switch (kind)
c080d8f0 4951 {
3072d30e 4952 case REG_EQUAL:
4953 case REG_EQUIV:
4954 df_notes_rescan (insn);
4955 break;
4956 default:
4957 break;
c080d8f0 4958 }
f1934a33 4959
c080d8f0 4960 return REG_NOTES (insn);
f1934a33 4961}
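
/* For instance, after emitting an insn known to compute the value of
   expression X, a caller can record that fact (a sketch; INSN and X
   are hypothetical):

     set_unique_reg_note (insn, REG_EQUAL, copy_rtx (x));  */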
15bbde2b 4962\f
4963/* Return an indication of which type of insn should have X as a body.
4964 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4965
9b69f75b 4966static enum rtx_code
35cb5232 4967classify_insn (rtx x)
15bbde2b 4968{
6d7dc5b9 4969 if (LABEL_P (x))
15bbde2b 4970 return CODE_LABEL;
4971 if (GET_CODE (x) == CALL)
4972 return CALL_INSN;
4973 if (GET_CODE (x) == RETURN)
4974 return JUMP_INSN;
4975 if (GET_CODE (x) == SET)
4976 {
4977 if (SET_DEST (x) == pc_rtx)
4978 return JUMP_INSN;
4979 else if (GET_CODE (SET_SRC (x)) == CALL)
4980 return CALL_INSN;
4981 else
4982 return INSN;
4983 }
4984 if (GET_CODE (x) == PARALLEL)
4985 {
19cb6b50 4986 int j;
15bbde2b 4987 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4988 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4989 return CALL_INSN;
4990 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4991 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4992 return JUMP_INSN;
4993 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4994 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4995 return CALL_INSN;
4996 }
4997 return INSN;
4998}
4999
5000/* Emit the rtl pattern X as an appropriate kind of insn.
5001 If X is a label, it is simply added into the insn chain. */
5002
5003rtx
35cb5232 5004emit (rtx x)
15bbde2b 5005{
5006 enum rtx_code code = classify_insn (x);
5007
611234b4 5008 switch (code)
15bbde2b 5009 {
611234b4 5010 case CODE_LABEL:
5011 return emit_label (x);
5012 case INSN:
5013 return emit_insn (x);
5014 case JUMP_INSN:
5015 {
5016 rtx insn = emit_jump_insn (x);
5017 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5018 return emit_barrier ();
5019 return insn;
5020 }
5021 case CALL_INSN:
5022 return emit_call_insn (x);
9845d120 5023 case DEBUG_INSN:
5024 return emit_debug_insn (x);
611234b4 5025 default:
5026 gcc_unreachable ();
15bbde2b 5027 }
15bbde2b 5028}
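
/* For example (a sketch; REG and SRC are hypothetical), the first call
   below is classified as an INSN and the second adds a label:

     emit (gen_rtx_SET (VOIDmode, reg, src));
     emit (gen_label_rtx ());  */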
5029\f
1f3233d1 5030/* Space for free sequence stack entries. */
7035b2ab 5031static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5032
735f4358 5033/* Begin emitting insns to a sequence. If this sequence will contain
5034 something that might cause the compiler to pop arguments to function
5035 calls (because those pops have previously been deferred; see
5036 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5037 before calling this function. That will ensure that the deferred
5038 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5039
5040void
35cb5232 5041start_sequence (void)
15bbde2b 5042{
5043 struct sequence_stack *tem;
5044
1f3233d1 5045 if (free_sequence_stack != NULL)
5046 {
5047 tem = free_sequence_stack;
5048 free_sequence_stack = tem->next;
5049 }
5050 else
ba72912a 5051 tem = ggc_alloc_sequence_stack ();
15bbde2b 5052
0a893c29 5053 tem->next = seq_stack;
06f9d6ef 5054 tem->first = get_insns ();
5055 tem->last = get_last_insn ();
15bbde2b 5056
0a893c29 5057 seq_stack = tem;
15bbde2b 5058
06f9d6ef 5059 set_first_insn (0);
5060 set_last_insn (0);
15bbde2b 5061}
5062
b49854c6 5063/* Set up the insn chain starting with FIRST as the current sequence,
5064 saving the previously current one. See the documentation for
5065 start_sequence for more information about how to use this function. */
15bbde2b 5066
5067void
35cb5232 5068push_to_sequence (rtx first)
15bbde2b 5069{
5070 rtx last;
5071
5072 start_sequence ();
5073
3c802a1e 5074 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5075 ;
15bbde2b 5076
06f9d6ef 5077 set_first_insn (first);
5078 set_last_insn (last);
15bbde2b 5079}
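
/* A minimal sketch of appending to a detached insn list SEQ
   (SEQ and PAT are hypothetical):

     push_to_sequence (seq);
     emit_insn (pat);
     seq = get_insns ();
     end_sequence ();  */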
5080
28bf151d 5081/* Like push_to_sequence, but take the last insn as an argument to avoid
5082 looping through the list. */
5083
5084void
5085push_to_sequence2 (rtx first, rtx last)
5086{
5087 start_sequence ();
5088
06f9d6ef 5089 set_first_insn (first);
5090 set_last_insn (last);
28bf151d 5091}
5092
ab74c92f 5093/* Set up the outer-level insn chain
5094 as the current sequence, saving the previously current one. */
5095
5096void
35cb5232 5097push_topmost_sequence (void)
ab74c92f 5098{
2041cfd9 5099 struct sequence_stack *stack, *top = NULL;
ab74c92f 5100
5101 start_sequence ();
5102
0a893c29 5103 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5104 top = stack;
5105
06f9d6ef 5106 set_first_insn (top->first);
5107 set_last_insn (top->last);
ab74c92f 5108}
5109
5110/* After emitting to the outer-level insn chain, update the outer-level
5111 insn chain, and restore the previous saved state. */
5112
5113void
35cb5232 5114pop_topmost_sequence (void)
ab74c92f 5115{
2041cfd9 5116 struct sequence_stack *stack, *top = NULL;
ab74c92f 5117
0a893c29 5118 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5119 top = stack;
5120
06f9d6ef 5121 top->first = get_insns ();
5122 top->last = get_last_insn ();
ab74c92f 5123
5124 end_sequence ();
5125}
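
/* The two functions above are used as a pair to emit insns into the
   function's outer-level chain while a nested sequence is in progress
   (a sketch; PAT is hypothetical):

     push_topmost_sequence ();
     emit_insn (pat);
     pop_topmost_sequence ();  */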
5126
15bbde2b 5127/* After emitting to a sequence, restore previous saved state.
5128
b49854c6 5129 To get the contents of the sequence just made, you must call
31d3e01c 5130 `get_insns' *before* calling here.
b49854c6 5131
5132 If the compiler might have deferred popping arguments while
5133 generating this sequence, and this sequence will not be immediately
5134 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5135 before calling get_insns. That will ensure that the deferred
b49854c6 5136 pops are inserted into this sequence, and not into some random
5137 location in the instruction stream. See INHIBIT_DEFER_POP for more
5138 information about deferred popping of arguments. */
15bbde2b 5139
5140void
35cb5232 5141end_sequence (void)
15bbde2b 5142{
0a893c29 5143 struct sequence_stack *tem = seq_stack;
15bbde2b 5144
06f9d6ef 5145 set_first_insn (tem->first);
5146 set_last_insn (tem->last);
0a893c29 5147 seq_stack = tem->next;
15bbde2b 5148
1f3233d1 5149 memset (tem, 0, sizeof (*tem));
5150 tem->next = free_sequence_stack;
5151 free_sequence_stack = tem;
15bbde2b 5152}
5153
5154/* Return 1 if currently emitting into a sequence. */
5155
5156int
35cb5232 5157in_sequence_p (void)
15bbde2b 5158{
0a893c29 5159 return seq_stack != 0;
15bbde2b 5160}
15bbde2b 5161\f
02ebfa52 5162/* Put the various virtual registers into REGNO_REG_RTX. */
5163
2f3874ce 5164static void
b079a207 5165init_virtual_regs (void)
02ebfa52 5166{
b079a207 5167 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5168 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5169 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5170 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5171 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
60778e62 5172 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5173 = virtual_preferred_stack_boundary_rtx;
0a893c29 5174}
5175
928d57e3 5176\f
5177/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5178static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5179static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5180static int copy_insn_n_scratches;
5181
5182/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5183 copied an ASM_OPERANDS.
5184 In that case, it is the original input-operand vector. */
5185static rtvec orig_asm_operands_vector;
5186
5187/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5188 copied an ASM_OPERANDS.
5189 In that case, it is the copied input-operand vector. */
5190static rtvec copy_asm_operands_vector;
5191
5192/* Likewise for the constraints vector. */
5193static rtvec orig_asm_constraints_vector;
5194static rtvec copy_asm_constraints_vector;
5195
5196/* Recursively create a new copy of an rtx for copy_insn.
5197 This function differs from copy_rtx in that it handles SCRATCHes and
5198 ASM_OPERANDs properly.
5199 Normally, this function is not used directly; use copy_insn as front end.
5200 However, you could first copy an insn pattern with copy_insn and then use
5201 this function afterwards to properly copy any REG_NOTEs containing
5202 SCRATCHes. */
5203
5204rtx
35cb5232 5205copy_insn_1 (rtx orig)
928d57e3 5206{
19cb6b50 5207 rtx copy;
5208 int i, j;
5209 RTX_CODE code;
5210 const char *format_ptr;
928d57e3 5211
25e880b1 5212 if (orig == NULL)
5213 return NULL;
5214
928d57e3 5215 code = GET_CODE (orig);
5216
5217 switch (code)
5218 {
5219 case REG:
928d57e3 5220 case CONST_INT:
5221 case CONST_DOUBLE:
e397ad8e 5222 case CONST_FIXED:
886cfd4f 5223 case CONST_VECTOR:
928d57e3 5224 case SYMBOL_REF:
5225 case CODE_LABEL:
5226 case PC:
5227 case CC0:
928d57e3 5228 return orig;
c09425a0 5229 case CLOBBER:
5230 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5231 return orig;
5232 break;
928d57e3 5233
5234 case SCRATCH:
5235 for (i = 0; i < copy_insn_n_scratches; i++)
5236 if (copy_insn_scratch_in[i] == orig)
5237 return copy_insn_scratch_out[i];
5238 break;
5239
5240 case CONST:
3072d30e 5241 if (shared_const_p (orig))
928d57e3 5242 return orig;
5243 break;
d823ba47 5244
928d57e3 5245 /* A MEM with a constant address is not sharable. The problem is that
5246 the constant address may need to be reloaded. If the mem is shared,
5247 then reloading one copy of this mem will cause all copies to appear
5248 to have been reloaded. */
5249
5250 default:
5251 break;
5252 }
5253
f2d0e9f1 5254 /* Copy the various flags, fields, and other information. We assume
5255 that all fields need copying, and then clear the fields that should
928d57e3 5256 not be copied. That is the sensible default behavior, and forces
5257 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5258 copy = shallow_copy_rtx (orig);
928d57e3 5259
5260 /* We do not copy the USED flag, which is used as a mark bit during
5261 walks over the RTL. */
7c25cb91 5262 RTX_FLAG (copy, used) = 0;
928d57e3 5263
5264 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5265 if (INSN_P (orig))
928d57e3 5266 {
7c25cb91 5267 RTX_FLAG (copy, jump) = 0;
5268 RTX_FLAG (copy, call) = 0;
5269 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5270 }
d823ba47 5271
928d57e3 5272 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5273
5274 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5275 switch (*format_ptr++)
5276 {
5277 case 'e':
5278 if (XEXP (orig, i) != NULL)
5279 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5280 break;
928d57e3 5281
f2d0e9f1 5282 case 'E':
5283 case 'V':
5284 if (XVEC (orig, i) == orig_asm_constraints_vector)
5285 XVEC (copy, i) = copy_asm_constraints_vector;
5286 else if (XVEC (orig, i) == orig_asm_operands_vector)
5287 XVEC (copy, i) = copy_asm_operands_vector;
5288 else if (XVEC (orig, i) != NULL)
5289 {
5290 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5291 for (j = 0; j < XVECLEN (copy, i); j++)
5292 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5293 }
5294 break;
928d57e3 5295
f2d0e9f1 5296 case 't':
5297 case 'w':
5298 case 'i':
5299 case 's':
5300 case 'S':
5301 case 'u':
5302 case '0':
5303 /* These are left unchanged. */
5304 break;
928d57e3 5305
f2d0e9f1 5306 default:
5307 gcc_unreachable ();
5308 }
928d57e3 5309
5310 if (code == SCRATCH)
5311 {
5312 i = copy_insn_n_scratches++;
611234b4 5313 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5314 copy_insn_scratch_in[i] = orig;
5315 copy_insn_scratch_out[i] = copy;
5316 }
5317 else if (code == ASM_OPERANDS)
5318 {
d91f2122 5319 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5320 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5321 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5322 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5323 }
5324
5325 return copy;
5326}
5327
5328/* Create a new copy of an rtx.
5329 This function differs from copy_rtx in that it handles SCRATCHes and
5330 ASM_OPERANDs properly.
5331 INSN doesn't really have to be a full INSN; it could be just the
5332 pattern. */
5333rtx
35cb5232 5334copy_insn (rtx insn)
928d57e3 5335{
5336 copy_insn_n_scratches = 0;
5337 orig_asm_operands_vector = 0;
5338 orig_asm_constraints_vector = 0;
5339 copy_asm_operands_vector = 0;
5340 copy_asm_constraints_vector = 0;
5341 return copy_insn_1 (insn);
5342}
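
/* A typical use is duplicating an existing insn's pattern so the copy
   can be emitted elsewhere without invalid RTL sharing (a sketch; INSN
   is hypothetical):

     rtx pat = copy_insn (PATTERN (insn));
     emit_insn_before (pat, insn);  */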
02ebfa52 5343
15bbde2b 5344/* Initialize data structures and variables in this file
5345 before generating rtl for each function. */
5346
5347void
35cb5232 5348init_emit (void)
15bbde2b 5349{
06f9d6ef 5350 set_first_insn (NULL);
5351 set_last_insn (NULL);
9845d120 5352 if (MIN_NONDEBUG_INSN_UID)
5353 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5354 else
5355 cur_insn_uid = 1;
5356 cur_debug_insn_uid = 1;
15bbde2b 5357 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
7bd3dcc4 5358 last_location = UNKNOWN_LOCATION;
15bbde2b 5359 first_label_num = label_num;
0a893c29 5360 seq_stack = NULL;
15bbde2b 5361
15bbde2b 5362 /* Init the tables that describe all the pseudo regs. */
5363
fd6ffb7c 5364 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5365
fd6ffb7c 5366 crtl->emit.regno_pointer_align
2457c754 5367 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5368
ba72912a 5369 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
fcdc122e 5370
936082bb 5371 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5372 memcpy (regno_reg_rtx,
679bcc8d 5373 initial_regno_reg_rtx,
90295bd2 5374 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5375
15bbde2b 5376 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5377 init_virtual_regs ();
888e0d33 5378
5379 /* Indicate that the virtual registers and stack locations are
5380 all pointers. */
e61a0a7f 5381 REG_POINTER (stack_pointer_rtx) = 1;
5382 REG_POINTER (frame_pointer_rtx) = 1;
5383 REG_POINTER (hard_frame_pointer_rtx) = 1;
5384 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5385
e61a0a7f 5386 REG_POINTER (virtual_incoming_args_rtx) = 1;
5387 REG_POINTER (virtual_stack_vars_rtx) = 1;
5388 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5389 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5390 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5391
d4c332ff 5392#ifdef STACK_BOUNDARY
80909c64 5393 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5394 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5395 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5396 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5397
5398 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5399 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5400 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5401 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5402 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5403#endif
5404
89525da0 5405#ifdef INIT_EXPANDERS
5406 INIT_EXPANDERS;
5407#endif
15bbde2b 5408}
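
/* Illustrative sketch, not part of the original source: properties that
   hold once init_emit has run for a function.  The virtual registers are
   marked as pointers, and where STACK_BOUNDARY is defined their alignment
   is recorded and queryable via REGNO_POINTER_ALIGN.  */

static void ATTRIBUTE_UNUSED
example_check_virtual_regs (void)
{
  gcc_assert (REG_POINTER (virtual_stack_vars_rtx));
#ifdef STACK_BOUNDARY
  gcc_assert (REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
	      == STACK_BOUNDARY);
#endif
}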
5409
6e68dcb2 5410/* Generate a vector constant for mode MODE and constant value CONSTANT. */
886cfd4f 5411
5412static rtx
6e68dcb2 5413gen_const_vector (enum machine_mode mode, int constant)
886cfd4f 5414{
5415 rtx tem;
5416 rtvec v;
5417 int units, i;
5418 enum machine_mode inner;
5419
5420 units = GET_MODE_NUNITS (mode);
5421 inner = GET_MODE_INNER (mode);
5422
069b07bf 5423 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5424
886cfd4f 5425 v = rtvec_alloc (units);
5426
6e68dcb2 5427 /* We need to call this function after we set the scalar const_tiny_rtx
5428 entries. */
5429 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5430
5431 for (i = 0; i < units; ++i)
6e68dcb2 5432 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5433
9426b612 5434 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5435 return tem;
5436}
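
/* Illustrative sketch, not part of the original source: what
   gen_const_vector builds.  Assuming the target provides V4SImode, the
   call below yields a CONST_VECTOR whose four elements are all the
   shared scalar zero for SImode.  */

static rtx ATTRIBUTE_UNUSED
example_zero_vector (void)
{
  rtx v = gen_const_vector (V4SImode, 0);
  gcc_assert (GET_CODE (v) == CONST_VECTOR
	      && CONST_VECTOR_ELT (v, 0) == CONST0_RTX (SImode));
  return v;
}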
5437
9426b612 5438/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
6e68dcb2 5439 when all elements are zero, and the one vector when all elements are one. */
9426b612 5440rtx
35cb5232 5441gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
9426b612 5442{
6e68dcb2 5443 enum machine_mode inner = GET_MODE_INNER (mode);
5444 int nunits = GET_MODE_NUNITS (mode);
5445 rtx x;
9426b612 5446 int i;
5447
6e68dcb2 5448 /* Check to see if all of the elements have the same value. */
5449 x = RTVEC_ELT (v, nunits - 1);
5450 for (i = nunits - 2; i >= 0; i--)
5451 if (RTVEC_ELT (v, i) != x)
5452 break;
5453
5454 /* If the values are all the same, check to see if we can use one of the
5455 standard constant vectors. */
5456 if (i == -1)
5457 {
5458 if (x == CONST0_RTX (inner))
5459 return CONST0_RTX (mode);
5460 else if (x == CONST1_RTX (inner))
5461 return CONST1_RTX (mode);
5462 }
5463
5464 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5465}
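
/* Illustrative sketch, not part of the original source: because of the
   uniformity check above, a vector of identical zeros collapses to the
   canonical shared constant, so callers may compare against
   CONST0_RTX (mode) by pointer.  V2SImode is assumed to exist on the
   target.  */

static rtx ATTRIBUTE_UNUSED
example_collapsed_vector (void)
{
  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = const0_rtx;
  RTVEC_ELT (v, 1) = const0_rtx;
  /* Every element equals CONST0_RTX (SImode), so the shared zero vector
     is returned rather than a fresh CONST_VECTOR.  */
  return gen_rtx_CONST_VECTOR (V2SImode, v);
}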
5466
6d8b68a3 5467/* Initialize global register information required by all functions. */
5468
5469void
5470init_emit_regs (void)
5471{
5472 int i;
d83fcaa1 5473 enum machine_mode mode;
5474 mem_attrs *attrs;
6d8b68a3 5475
5476 /* Reset register attributes.  */
5477 htab_empty (reg_attrs_htab);
5478
5479 /* We need reg_raw_mode, so initialize the modes now. */
5480 init_reg_modes_target ();
5481
5482 /* Assign register numbers to the globally defined register rtx. */
1a860023 5483 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5484 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5485 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6d8b68a3 5486 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5487 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5488 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5489 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5490 virtual_incoming_args_rtx =
5491 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5492 virtual_stack_vars_rtx =
5493 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5494 virtual_stack_dynamic_rtx =
5495 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5496 virtual_outgoing_args_rtx =
5497 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5498 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
60778e62 5499 virtual_preferred_stack_boundary_rtx =
5500 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6d8b68a3 5501
5502 /* Initialize RTL for commonly used hard registers. These are
5503 copied into regno_reg_rtx as we begin to compile each function. */
5504 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
679bcc8d 5505 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6d8b68a3 5506
5507#ifdef RETURN_ADDRESS_POINTER_REGNUM
5508 return_address_pointer_rtx
5509 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5510#endif
5511
6d8b68a3 5512 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5513 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5514 else
5515 pic_offset_table_rtx = NULL_RTX;
d83fcaa1 5516
5517 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5518 {
5519 mode = (enum machine_mode) i;
5520 attrs = ggc_alloc_cleared_mem_attrs ();
5521 attrs->align = BITS_PER_UNIT;
5522 attrs->addrspace = ADDR_SPACE_GENERIC;
5523 if (mode != BLKmode)
5524 {
5525 attrs->size = GEN_INT (GET_MODE_SIZE (mode));
5526 if (STRICT_ALIGNMENT)
5527 attrs->align = GET_MODE_ALIGNMENT (mode);
5528 }
5529 mode_mem_attrs[i] = attrs;
5530 }
6d8b68a3 5531}
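
/* Illustrative sketch, not part of the original source: the per-mode
   memory attributes installed above.  Every non-BLKmode entry records
   the mode's size as a shared CONST_INT and the generic address space.  */

static void ATTRIBUTE_UNUSED
example_mode_mem_attrs (void)
{
  mem_attrs *attrs = mode_mem_attrs[(int) SImode];
  gcc_assert (attrs->size == GEN_INT (GET_MODE_SIZE (SImode)));
  gcc_assert (attrs->addrspace == ADDR_SPACE_GENERIC);
}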
5532
01703575 5533/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5534
5535void
01703575 5536init_emit_once (void)
15bbde2b 5537{
5538 int i;
5539 enum machine_mode mode;
9e042f31 5540 enum machine_mode double_mode;
15bbde2b 5541
e397ad8e 5542 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5543 hash tables. */
573aba85 5544 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5545 const_int_htab_eq, NULL);
c6259b83 5546
573aba85 5547 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5548 const_double_htab_eq, NULL);
2ff23ed0 5549
e397ad8e 5550 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5551 const_fixed_htab_eq, NULL);
5552
573aba85 5553 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5554 mem_attrs_htab_eq, NULL);
ca74b940 5555 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5556 reg_attrs_htab_eq, NULL);
77695070 5557
71d7daa2 5558 /* Compute the byte, word and double modes.  */
5559
5560 byte_mode = VOIDmode;
5561 word_mode = VOIDmode;
5562 double_mode = VOIDmode;
5563
069b07bf 5564 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5565 mode != VOIDmode;
71d7daa2 5566 mode = GET_MODE_WIDER_MODE (mode))
5567 {
5568 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5569 && byte_mode == VOIDmode)
5570 byte_mode = mode;
5571
5572 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5573 && word_mode == VOIDmode)
5574 word_mode = mode;
5575 }
5576
069b07bf 5577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5578 mode != VOIDmode;
71d7daa2 5579 mode = GET_MODE_WIDER_MODE (mode))
5580 {
5581 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5582 && double_mode == VOIDmode)
5583 double_mode = mode;
5584 }
5585
5586 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5587
57c097d5 5588#ifdef INIT_EXPANDERS
ab5beff9 5589 /* This is to initialize {init|mark|free}_machine_status before the first
5590 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5591 end which calls push_function_context_to before the first call to
57c097d5 5592 init_function_start. */
5593 INIT_EXPANDERS;
5594#endif
5595
15bbde2b 5596 /* Create the unique rtx's for certain rtx codes and operand values. */
5597
8fd5918e 5598 /* Don't use gen_rtx_CONST_INT here, since it would look the values up
7014838c 5599 in the very const_int_rtx table we are initializing.  */
15bbde2b 5600 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5601 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5602 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5603
1a60f06a 5604 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5605 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5606 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5607 else
3ad7bb1c 5608 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5609
2ff23ed0 5610 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5611 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5612 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
3fa759a9 5613
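  /* -1.0 is 1.0 with its sign bit set.  */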
5614 dconstm1 = dconst1;
5615 dconstm1.sign = 1;
77e89269 5616
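  /* 0.5 is 1.0 with its binary exponent lowered by one.  */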
5617 dconsthalf = dconst1;
9d96125b 5618 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5619
8918c507 5620 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
15bbde2b 5621 {
3fa759a9 5622 const REAL_VALUE_TYPE *const r =
badfe841 5623 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5624
069b07bf 5625 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5626 mode != VOIDmode;
5627 mode = GET_MODE_WIDER_MODE (mode))
5628 const_tiny_rtx[i][(int) mode] =
5629 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5630
5631 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5632 mode != VOIDmode;
15bbde2b 5633 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5634 const_tiny_rtx[i][(int) mode] =
5635 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
15bbde2b 5636
b572011e 5637 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5638
069b07bf 5639 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5640 mode != VOIDmode;
15bbde2b 5641 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5642 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5643
5644 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5645 mode != VOIDmode;
5646 mode = GET_MODE_WIDER_MODE (mode))
5647 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5648 }
5649
4248fc32 5650 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5651 mode != VOIDmode;
5652 mode = GET_MODE_WIDER_MODE (mode))
5653 {
5654 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5655 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5656 }
5657
5658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5659 mode != VOIDmode;
5660 mode = GET_MODE_WIDER_MODE (mode))
5661 {
5662 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5663 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5664 }
5665
886cfd4f 5666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5667 mode != VOIDmode;
5668 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5669 {
5670 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5671 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5672 }
886cfd4f 5673
5674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5675 mode != VOIDmode;
5676 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5677 {
5678 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5679 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5680 }
886cfd4f 5681
06f0b99c 5682 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5683 mode != VOIDmode;
5684 mode = GET_MODE_WIDER_MODE (mode))
5685 {
5686 FCONST0(mode).data.high = 0;
5687 FCONST0(mode).data.low = 0;
5688 FCONST0(mode).mode = mode;
e397ad8e 5689 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5690 FCONST0 (mode), mode);
06f0b99c 5691 }
5692
5693 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5694 mode != VOIDmode;
5695 mode = GET_MODE_WIDER_MODE (mode))
5696 {
5697 FCONST0(mode).data.high = 0;
5698 FCONST0(mode).data.low = 0;
5699 FCONST0(mode).mode = mode;
e397ad8e 5700 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5701 FCONST0 (mode), mode);
06f0b99c 5702 }
5703
5704 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5705 mode != VOIDmode;
5706 mode = GET_MODE_WIDER_MODE (mode))
5707 {
5708 FCONST0(mode).data.high = 0;
5709 FCONST0(mode).data.low = 0;
5710 FCONST0(mode).mode = mode;
e397ad8e 5711 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5712 FCONST0 (mode), mode);
06f0b99c 5713
5714 /* We store the value 1, represented as 1 << FBIT in this format.  */
5715 FCONST1(mode).data.high = 0;
5716 FCONST1(mode).data.low = 0;
5717 FCONST1(mode).mode = mode;
5718 lshift_double (1, 0, GET_MODE_FBIT (mode),
5719 2 * HOST_BITS_PER_WIDE_INT,
5720 &FCONST1(mode).data.low,
5721 &FCONST1(mode).data.high,
5722 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5723 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5724 FCONST1 (mode), mode);
06f0b99c 5725 }
5726
5727 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5728 mode != VOIDmode;
5729 mode = GET_MODE_WIDER_MODE (mode))
5730 {
5731 FCONST0(mode).data.high = 0;
5732 FCONST0(mode).data.low = 0;
5733 FCONST0(mode).mode = mode;
e397ad8e 5734 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5735 FCONST0 (mode), mode);
06f0b99c 5736
5737 /* We store the value 1, represented as 1 << FBIT in this format.  */
5738 FCONST1(mode).data.high = 0;
5739 FCONST1(mode).data.low = 0;
5740 FCONST1(mode).mode = mode;
5741 lshift_double (1, 0, GET_MODE_FBIT (mode),
5742 2 * HOST_BITS_PER_WIDE_INT,
5743 &FCONST1(mode).data.low,
5744 &FCONST1(mode).data.high,
5745 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5746 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5747 FCONST1 (mode), mode);
5748 }
5749
5750 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5751 mode != VOIDmode;
5752 mode = GET_MODE_WIDER_MODE (mode))
5753 {
5754 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5755 }
5756
5757 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5758 mode != VOIDmode;
5759 mode = GET_MODE_WIDER_MODE (mode))
5760 {
5761 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5762 }
5763
5764 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5765 mode != VOIDmode;
5766 mode = GET_MODE_WIDER_MODE (mode))
5767 {
5768 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5769 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5770 }
5771
5772 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5773 mode != VOIDmode;
5774 mode = GET_MODE_WIDER_MODE (mode))
5775 {
5776 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5777 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 5778 }
5779
0fd4500a 5780 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5781 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5782 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 5783
065336b4 5784 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5785 if (STORE_FLAG_VALUE == 1)
5786 const_tiny_rtx[1][(int) BImode] = const1_rtx;
15bbde2b 5787}
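
/* Illustrative sketch, not part of the original source: the const_tiny_rtx
   table filled in above is what the CONST0_RTX and CONST1_RTX macros in
   rtl.h index into, so the canonical constants compare equal by pointer.  */

static void ATTRIBUTE_UNUSED
example_tiny_rtx_sharing (void)
{
  gcc_assert (CONST0_RTX (SImode) == const0_rtx);
  gcc_assert (CONST1_RTX (SImode) == const1_rtx);
}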
ac6c481d 5788\f
cd0fe062 5789/* Produce an exact duplicate of insn INSN after AFTER.
 5790 Takes care of updating libcall regions, if present. */
5791
5792rtx
35cb5232 5793emit_copy_of_insn_after (rtx insn, rtx after)
cd0fe062 5794{
9ce37fa7 5795 rtx new_rtx, link;
cd0fe062 5796
5797 switch (GET_CODE (insn))
5798 {
5799 case INSN:
9ce37fa7 5800 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5801 break;
5802
5803 case JUMP_INSN:
9ce37fa7 5804 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5805 break;
5806
9845d120 5807 case DEBUG_INSN:
5808 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5809 break;
5810
cd0fe062 5811 case CALL_INSN:
9ce37fa7 5812 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5813 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 5814 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 5815 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 5816 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5817 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5818 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
48e1416a 5819 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 5820 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 5821 break;
5822
5823 default:
611234b4 5824 gcc_unreachable ();
cd0fe062 5825 }
5826
5827 /* Update LABEL_NUSES. */
9ce37fa7 5828 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 5829
9ce37fa7 5830 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
ab87d1bc 5831
98116afd 5832 /* If the old insn is frame related, then so is the new one. This is
5833 primarily needed for IA-64 unwind info which marks epilogue insns,
5834 which may be duplicated by the basic block reordering code. */
9ce37fa7 5835 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 5836
19d2fe05 5837 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5838 will make them. REG_LABEL_TARGETs are created there too, but are
5839 supposed to be sticky, so we copy them. */
cd0fe062 5840 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 5841 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 5842 {
5843 if (GET_CODE (link) == EXPR_LIST)
9ce37fa7 5844 add_reg_note (new_rtx, REG_NOTE_KIND (link),
a1ddb869 5845 copy_insn_1 (XEXP (link, 0)));
cd0fe062 5846 else
9ce37fa7 5847 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
cd0fe062 5848 }
5849
9ce37fa7 5850 INSN_CODE (new_rtx) = INSN_CODE (insn);
5851 return new_rtx;
cd0fe062 5852}
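
/* Illustrative sketch, not part of the original source: duplicating an
   insn in place.  The copy receives a fresh pattern via copy_insn,
   inherits the location, frame-relatedness and sticky notes handled
   above, and is linked into the chain right after AFTER.  */

static rtx ATTRIBUTE_UNUSED
example_duplicate_insn (rtx insn)
{
  return emit_copy_of_insn_after (insn, insn);
}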
1f3233d1 5853
7035b2ab 5854static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 5855rtx
5856gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5857{
5858 if (hard_reg_clobbers[mode][regno])
5859 return hard_reg_clobbers[mode][regno];
5860 else
5861 return (hard_reg_clobbers[mode][regno] =
5862 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5863}
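
/* Illustrative sketch, not part of the original source: thanks to the
   cache above, repeated requests for the same (mode, register) pair
   return one shared CLOBBER rtx.  Hard register number 0 is used purely
   for illustration.  */

static void ATTRIBUTE_UNUSED
example_clobber_sharing (void)
{
  rtx c1 = gen_hard_reg_clobber (SImode, 0);
  rtx c2 = gen_hard_reg_clobber (SImode, 0);
  gcc_assert (c1 == c2);
}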
5864
1f3233d1 5865#include "gt-emit-rtl.h"