gcc/emit-rtl.c
bccafa26 1/* Emit RTL for the GCC expander.
3aea1f79 2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
15bbde2b 3
f12b58b3 4This file is part of GCC.
15bbde2b 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
15bbde2b 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15bbde2b 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
15bbde2b 19
20
21/* Middle-to-low level generation of rtx code and insns.
22
74efa612 23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
15bbde2b 25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
74efa612 28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
8fd5918e 31 dependent is the kind of rtx's they make and what arguments they
32 use. */
15bbde2b 33
34#include "config.h"
405711de 35#include "system.h"
805e22b2 36#include "coretypes.h"
37#include "tm.h"
0b205f4c 38#include "diagnostic-core.h"
15bbde2b 39#include "rtl.h"
3fd7e17f 40#include "tree.h"
9ed99284 41#include "varasm.h"
bc61cadb 42#include "basic-block.h"
43#include "tree-eh.h"
7953c610 44#include "tm_p.h"
15bbde2b 45#include "flags.h"
46#include "function.h"
9ed99284 47#include "stringpool.h"
15bbde2b 48#include "expr.h"
49#include "regs.h"
c6b6c51f 50#include "hard-reg-set.h"
73f5c1e3 51#include "hashtab.h"
15bbde2b 52#include "insn-config.h"
0dbd1c74 53#include "recog.h"
a3426c4c 54#include "bitmap.h"
b29760a8 55#include "debug.h"
b0278d39 56#include "langhooks.h"
3072d30e 57#include "df.h"
9845d120 58#include "params.h"
98155838 59#include "target.h"
649d8da6 60
679bcc8d 61struct target_rtl default_target_rtl;
62#if SWITCHABLE_TARGET
63struct target_rtl *this_target_rtl = &default_target_rtl;
64#endif
65
66#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
67
399d45d3 68/* Commonly used modes. */
69
a92771b8 70enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
71enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 72enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 73enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 74
b079a207 75/* Data structures maintained for the currently processed function in RTL form. */
76
fd6ffb7c 77struct rtl_data x_rtl;
b079a207 78
79/* Indexed by pseudo register number, gives the rtx for that pseudo.
48e1416a 80 Allocated in parallel with regno_pointer_align.
b079a207 81 FIXME: We could put it into the emit_status struct, but gengtype is not able
82 to deal with the length attribute nested in top-level structures. */
83
84rtx * regno_reg_rtx;
15bbde2b 85
86/* This is *not* reset after each function. It gives each CODE_LABEL
87 in the entire compilation a unique label number. */
88
9105005a 89static GTY(()) int label_num = 1;
15bbde2b 90
15bbde2b 91/* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
ba8dfb08 93 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
94 is set only for MODE_INT and MODE_VECTOR_INT modes. */
15bbde2b 95
ba8dfb08 96rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
15bbde2b 97
1a60f06a 98rtx const_true_rtx;
99
15bbde2b 100REAL_VALUE_TYPE dconst0;
101REAL_VALUE_TYPE dconst1;
102REAL_VALUE_TYPE dconst2;
103REAL_VALUE_TYPE dconstm1;
77e89269 104REAL_VALUE_TYPE dconsthalf;
15bbde2b 105
06f0b99c 106/* Record fixed-point constants 0 and 1. */
107FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
108FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
109
15bbde2b 110/* We make one copy of (const_int C) where C is in
111 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
112 to save space during the compilation and simplify comparisons of
113 integers. */
114
57c097d5 115rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 116
7d7b0bac 117/* Standard pieces of rtx, to be substituted directly into things. */
118rtx pc_rtx;
119rtx ret_rtx;
120rtx simple_return_rtx;
121rtx cc0_rtx;
122
73f5c1e3 123/* A hash table storing CONST_INTs whose absolute value is greater
124 than MAX_SAVED_CONST_INT. */
125
1f3233d1 126static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
127 htab_t const_int_htab;
73f5c1e3 128
e913b5cd 129static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_wide_int_htab;
131
ca74b940 132/* A hash table storing register attribute structures. */
133static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
134 htab_t reg_attrs_htab;
135
2ff23ed0 136/* A hash table storing all CONST_DOUBLEs. */
1f3233d1 137static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_double_htab;
2ff23ed0 139
e397ad8e 140/* A hash table storing all CONST_FIXEDs. */
141static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_fixed_htab;
143
fd6ffb7c 144#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
9845d120 145#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
fd6ffb7c 146#define first_label_num (crtl->emit.x_first_label_num)
15bbde2b 147
265be050 148static void set_used_decls (tree);
35cb5232 149static void mark_label_nuses (rtx);
150static hashval_t const_int_htab_hash (const void *);
151static int const_int_htab_eq (const void *, const void *);
e913b5cd 152#if TARGET_SUPPORTS_WIDE_INT
153static hashval_t const_wide_int_htab_hash (const void *);
154static int const_wide_int_htab_eq (const void *, const void *);
155static rtx lookup_const_wide_int (rtx);
156#endif
35cb5232 157static hashval_t const_double_htab_hash (const void *);
158static int const_double_htab_eq (const void *, const void *);
159static rtx lookup_const_double (rtx);
e397ad8e 160static hashval_t const_fixed_htab_hash (const void *);
161static int const_fixed_htab_eq (const void *, const void *);
162static rtx lookup_const_fixed (rtx);
35cb5232 163static hashval_t reg_attrs_htab_hash (const void *);
164static int reg_attrs_htab_eq (const void *, const void *);
165static reg_attrs *get_reg_attrs (tree, int);
6e68dcb2 166static rtx gen_const_vector (enum machine_mode, int);
0e0727c4 167static void copy_rtx_if_shared_1 (rtx *orig);
73f5c1e3 168
3cd757b1 169/* Probability of the conditional branch currently being processed by try_split.
170 Set to -1 otherwise. */
171int split_branch_probability = -1;
649d8da6 172\f
73f5c1e3 173/* Returns a hash code for X (which is really a CONST_INT). */
174
175static hashval_t
35cb5232 176const_int_htab_hash (const void *x)
73f5c1e3 177{
dd9b9fc5 178 return (hashval_t) INTVAL ((const_rtx) x);
73f5c1e3 179}
180
6ef828f9 181/* Returns nonzero if the value represented by X (which is really a
73f5c1e3 182 CONST_INT) is the same as that given by Y (which is really a
183 HOST_WIDE_INT *). */
184
185static int
35cb5232 186const_int_htab_eq (const void *x, const void *y)
73f5c1e3 187{
dd9b9fc5 188 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
2ff23ed0 189}
190
e913b5cd 191#if TARGET_SUPPORTS_WIDE_INT
192/* Returns a hash code for X (which is really a CONST_WIDE_INT). */
193
194static hashval_t
195const_wide_int_htab_hash (const void *x)
196{
197 int i;
198 HOST_WIDE_INT hash = 0;
199 const_rtx xr = (const_rtx) x;
200
201 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
202 hash += CONST_WIDE_INT_ELT (xr, i);
203
204 return (hashval_t) hash;
205}
206
207/* Returns nonzero if the value represented by X (which is really a
208 CONST_WIDE_INT) is the same as that given by Y (which is really a
209 CONST_WIDE_INT). */
210
211static int
212const_wide_int_htab_eq (const void *x, const void *y)
213{
214 int i;
b3fba3cd 215 const_rtx xr = (const_rtx) x;
216 const_rtx yr = (const_rtx) y;
e913b5cd 217 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
218 return 0;
219
220 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
221 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
222 return 0;
ddb1be65 223
e913b5cd 224 return 1;
225}
226#endif
227
2ff23ed0 228/* Returns a hash code for X (which is really a CONST_DOUBLE). */
229static hashval_t
35cb5232 230const_double_htab_hash (const void *x)
2ff23ed0 231{
dd9b9fc5 232 const_rtx const value = (const_rtx) x;
3393215f 233 hashval_t h;
2ff23ed0 234
e913b5cd 235 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
3393215f 236 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
237 else
a5760913 238 {
e2e205b3 239 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
a5760913 240 /* MODE is used in the comparison, so it should be in the hash. */
241 h ^= GET_MODE (value);
242 }
2ff23ed0 243 return h;
244}
245
6ef828f9 246/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
2ff23ed0 247 is the same as that represented by Y (really a CONST_DOUBLE). */
248static int
35cb5232 249const_double_htab_eq (const void *x, const void *y)
2ff23ed0 250{
dd9b9fc5 251 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
2ff23ed0 252
253 if (GET_MODE (a) != GET_MODE (b))
254 return 0;
e913b5cd 255 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
f82a103d 256 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
257 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
258 else
259 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
260 CONST_DOUBLE_REAL_VALUE (b));
73f5c1e3 261}
262
e397ad8e 263/* Returns a hash code for X (which is really a CONST_FIXED). */
264
265static hashval_t
266const_fixed_htab_hash (const void *x)
267{
a9f1838b 268 const_rtx const value = (const_rtx) x;
e397ad8e 269 hashval_t h;
270
271 h = fixed_hash (CONST_FIXED_VALUE (value));
272 /* MODE is used in the comparison, so it should be in the hash. */
273 h ^= GET_MODE (value);
274 return h;
275}
276
277/* Returns nonzero if the value represented by X (really a CONST_FIXED)
278 is the same as that represented by Y (really a CONST_FIXED). */
279
280static int
281const_fixed_htab_eq (const void *x, const void *y)
282{
a9f1838b 283 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
e397ad8e 284
285 if (GET_MODE (a) != GET_MODE (b))
286 return 0;
287 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
288}
289
d72886b5 290/* Return true if the given memory attributes are equal. */
73f5c1e3 291
d72886b5 292static bool
293mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
73f5c1e3 294{
6d58bcba 295 return (p->alias == q->alias
296 && p->offset_known_p == q->offset_known_p
297 && (!p->offset_known_p || p->offset == q->offset)
298 && p->size_known_p == q->size_known_p
299 && (!p->size_known_p || p->size == q->size)
300 && p->align == q->align
bd1a81f7 301 && p->addrspace == q->addrspace
2f16183e 302 && (p->expr == q->expr
303 || (p->expr != NULL_TREE && q->expr != NULL_TREE
304 && operand_equal_p (p->expr, q->expr, 0))));
73f5c1e3 305}
306
d72886b5 307/* Set MEM's memory attributes so that they are the same as ATTRS. */
5cc193e7 308
d72886b5 309static void
310set_mem_attrs (rtx mem, mem_attrs *attrs)
311{
d72886b5 312 /* If everything is the default, we can just clear the attributes. */
313 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
314 {
315 MEM_ATTRS (mem) = 0;
316 return;
317 }
c6259b83 318
8dc3230c 319 if (!MEM_ATTRS (mem)
320 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
c6259b83 321 {
25a27413 322 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
8dc3230c 323 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
c6259b83 324 }
73f5c1e3 325}
326
ca74b940 327/* Returns a hash code for X (which is really a reg_attrs *). */
328
329static hashval_t
35cb5232 330reg_attrs_htab_hash (const void *x)
ca74b940 331{
aae87fc3 332 const reg_attrs *const p = (const reg_attrs *) x;
ca74b940 333
e19e0a33 334 return ((p->offset * 1000) ^ (intptr_t) p->decl);
ca74b940 335}
336
7ef5b942 337/* Returns nonzero if the value represented by X (which is really a
ca74b940 338 reg_attrs *) is the same as that given by Y (which is also really a
339 reg_attrs *). */
340
341static int
35cb5232 342reg_attrs_htab_eq (const void *x, const void *y)
ca74b940 343{
aae87fc3 344 const reg_attrs *const p = (const reg_attrs *) x;
345 const reg_attrs *const q = (const reg_attrs *) y;
ca74b940 346
347 return (p->decl == q->decl && p->offset == q->offset);
348}
349/* Allocate a new reg_attrs structure and insert it into the hash table if
350 one identical to it is not already in the table. The attributes describe
351 the DECL and byte OFFSET that the register refers to. */
352
353static reg_attrs *
35cb5232 354get_reg_attrs (tree decl, int offset)
ca74b940 355{
356 reg_attrs attrs;
357 void **slot;
358
359 /* If everything is the default, we can just return zero. */
360 if (decl == 0 && offset == 0)
361 return 0;
362
363 attrs.decl = decl;
364 attrs.offset = offset;
365
366 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
367 if (*slot == 0)
368 {
25a27413 369 *slot = ggc_alloc<reg_attrs> ();
ca74b940 370 memcpy (*slot, &attrs, sizeof (reg_attrs));
371 }
372
2457c754 373 return (reg_attrs *) *slot;
ca74b940 374}
375
3072d30e 376
377#if !HAVE_blockage
e12b44a3 378/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
379 and to prevent register equivalences from being seen across this insn. */
3072d30e 380
381rtx
382gen_blockage (void)
383{
384 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
385 MEM_VOLATILE_P (x) = true;
386 return x;
387}
388#endif
389
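/* Illustrative note (editorial addition, not part of emit-rtl.c):
   whether it comes from the machine description or from the fallback
   above, the blockage insn is typically emitted as a scheduling
   barrier in prologue/epilogue code:

     emit_insn (gen_blockage ());  */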
390
22cf44bc 391/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
392 don't attempt to share with the various global pieces of rtl (such as
393 frame_pointer_rtx). */
394
395rtx
35cb5232 396gen_raw_REG (enum machine_mode mode, int regno)
22cf44bc 397{
398 rtx x = gen_rtx_raw_REG (mode, regno);
399 ORIGINAL_REGNO (x) = regno;
400 return x;
401}
402
7014838c 403/* There are some RTL codes that require special attention; the generation
404 functions do the raw handling. If you add to this list, modify
405 special_rtx in gengenrtl.c as well. */
406
3ad7bb1c 407rtx
35cb5232 408gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3ad7bb1c 409{
73f5c1e3 410 void **slot;
411
3ad7bb1c 412 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 413 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 414
415#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
416 if (const_true_rtx && arg == STORE_FLAG_VALUE)
417 return const_true_rtx;
418#endif
419
73f5c1e3 420 /* Look up the CONST_INT in the hash table. */
2b3dbc20 421 slot = htab_find_slot_with_hash (const_int_htab, &arg,
422 (hashval_t) arg, INSERT);
7f2875d3 423 if (*slot == 0)
d7c47c0e 424 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 425
426 return (rtx) *slot;
3ad7bb1c 427}
428
2d232d05 429rtx
35cb5232 430gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2d232d05 431{
432 return GEN_INT (trunc_int_for_mode (c, mode));
433}
434
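/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): gen_int_mode truncates C with
   trunc_int_for_mode before calling GEN_INT, so equal values of a given
   mode collapse to a single shared rtx and pointer comparison suffices.  */
static bool
example_const_int_sharing (void)
{
  rtx a = gen_int_mode (0xff, QImode);  /* 0xff sign-extends to -1.  */
  rtx b = GEN_INT (-1);                 /* The cached constm1_rtx.  */
  return a == b;                        /* Shared, not merely equal.  */
}
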
2ff23ed0 435/* CONST_DOUBLEs might be created from pairs of integers, or from
436 REAL_VALUE_TYPEs. Also, their length is known only at run time,
437 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
438
439/* Determine whether REAL, a CONST_DOUBLE, already exists in the
440 hash table. If so, return its counterpart; otherwise add it
441 to the hash table and return it. */
442static rtx
35cb5232 443lookup_const_double (rtx real)
2ff23ed0 444{
445 void **slot = htab_find_slot (const_double_htab, real, INSERT);
446 if (*slot == 0)
447 *slot = real;
448
449 return (rtx) *slot;
450}
7f2875d3 451
2ff23ed0 452/* Return a CONST_DOUBLE rtx for a floating-point value specified by
453 VALUE in mode MODE. */
67f2a2eb 454rtx
35cb5232 455const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
67f2a2eb 456{
2ff23ed0 457 rtx real = rtx_alloc (CONST_DOUBLE);
458 PUT_MODE (real, mode);
459
e8aaae4e 460 real->u.rv = value;
2ff23ed0 461
462 return lookup_const_double (real);
463}
464
e397ad8e 465/* Determine whether FIXED, a CONST_FIXED, already exists in the
466 hash table. If so, return its counterpart; otherwise add it
467 to the hash table and return it. */
468
469static rtx
470lookup_const_fixed (rtx fixed)
471{
472 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
473 if (*slot == 0)
474 *slot = fixed;
475
476 return (rtx) *slot;
477}
478
479/* Return a CONST_FIXED rtx for a fixed-point value specified by
480 VALUE in mode MODE. */
481
482rtx
483const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
484{
485 rtx fixed = rtx_alloc (CONST_FIXED);
486 PUT_MODE (fixed, mode);
487
488 fixed->u.fv = value;
489
490 return lookup_const_fixed (fixed);
491}
492
e913b5cd 493#if TARGET_SUPPORTS_WIDE_INT == 0
33274180 494/* Constructs double_int from rtx CST. */
495
496double_int
497rtx_to_double_int (const_rtx cst)
498{
499 double_int r;
500
501 if (CONST_INT_P (cst))
cf8f0e63 502 r = double_int::from_shwi (INTVAL (cst));
78f1962f 503 else if (CONST_DOUBLE_AS_INT_P (cst))
33274180 504 {
505 r.low = CONST_DOUBLE_LOW (cst);
506 r.high = CONST_DOUBLE_HIGH (cst);
507 }
508 else
509 gcc_unreachable ();
510
511 return r;
512}
e913b5cd 513#endif
514
515#if TARGET_SUPPORTS_WIDE_INT
a342dbb2 516/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
517 If so, return its counterpart; otherwise add it to the hash table and
e913b5cd 518 return it. */
33274180 519
e913b5cd 520static rtx
521lookup_const_wide_int (rtx wint)
522{
523 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
524 if (*slot == 0)
525 *slot = wint;
33274180 526
e913b5cd 527 return (rtx) *slot;
528}
529#endif
3e052aec 530
a342dbb2 531/* Return an rtx constant for V, given that the constant has mode MODE.
532 The returned rtx will be a CONST_INT if V fits, otherwise it will be
533 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
534 (if TARGET_SUPPORTS_WIDE_INT). */
535
3e052aec 536rtx
28e557ef 537immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
3e052aec 538{
e913b5cd 539 unsigned int len = v.get_len ();
540 unsigned int prec = GET_MODE_PRECISION (mode);
541
542 /* Allow truncation but not extension since we do not know if the
543 number is signed or unsigned. */
544 gcc_assert (prec <= v.get_precision ());
545
546 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
547 return gen_int_mode (v.elt (0), mode);
548
549#if TARGET_SUPPORTS_WIDE_INT
550 {
551 unsigned int i;
552 rtx value;
ddb1be65 553 unsigned int blocks_needed
e913b5cd 554 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
555
556 if (len > blocks_needed)
557 len = blocks_needed;
558
559 value = const_wide_int_alloc (len);
560
561 /* It is so tempting to just put the mode in here. Must control
562 myself ... */
563 PUT_MODE (value, VOIDmode);
05c25ee6 564 CWI_PUT_NUM_ELEM (value, len);
e913b5cd 565
566 for (i = 0; i < len; i++)
05363b4a 567 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
e913b5cd 568
569 return lookup_const_wide_int (value);
570 }
571#else
05363b4a 572 return immed_double_const (v.elt (0), v.elt (1), mode);
e913b5cd 573#endif
3e052aec 574}
575
e913b5cd 576#if TARGET_SUPPORTS_WIDE_INT == 0
2ff23ed0 577/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
578 of ints: I0 is the low-order word and I1 is the high-order word.
24cd46a7 579 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
db20fb47 580 implied upper bits are copies of the high bit of i1. The value
581 itself is neither signed nor unsigned. Do not use this routine for
582 non-integer modes; convert to REAL_VALUE_TYPE and use
583 CONST_DOUBLE_FROM_REAL_VALUE. */
2ff23ed0 584
585rtx
35cb5232 586immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
2ff23ed0 587{
588 rtx value;
589 unsigned int i;
590
b1ca4af4 591 /* There are the following cases (note that there are no modes with
24cd46a7 592 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
b1ca4af4 593
594 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
595 gen_int_mode.
db20fb47 596 2) If the value of the integer fits into HOST_WIDE_INT anyway
597 (i.e., i1 consists only of copies of the sign bit, and the signs
598 of i0 and i1 are the same), then we return a CONST_INT for i0.
b1ca4af4 599 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
2ff23ed0 600 if (mode != VOIDmode)
601 {
611234b4 602 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
603 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
604 /* We can get a 0 for an error mark. */
605 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
606 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
2ff23ed0 607
b1ca4af4 608 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
609 return gen_int_mode (i0, mode);
2ff23ed0 610 }
611
612 /* If this integer fits in one word, return a CONST_INT. */
613 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
614 return GEN_INT (i0);
615
616 /* We use VOIDmode for integers. */
617 value = rtx_alloc (CONST_DOUBLE);
618 PUT_MODE (value, VOIDmode);
619
620 CONST_DOUBLE_LOW (value) = i0;
621 CONST_DOUBLE_HIGH (value) = i1;
622
623 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
624 XWINT (value, i) = 0;
625
626 return lookup_const_double (value);
67f2a2eb 627}
e913b5cd 628#endif
67f2a2eb 629
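/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): case 2 above in action.  In a TImode
   all-ones constant, I1 consists entirely of copies of the sign bit of
   I0, so the result is the shared (const_int -1), not a CONST_DOUBLE.  */
#if TARGET_SUPPORTS_WIDE_INT == 0
static rtx
example_immed_double (void)
{
  return immed_double_const (-1, -1, TImode);  /* Yields constm1_rtx.  */
}
#endif
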
3ad7bb1c 630rtx
35cb5232 631gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3ad7bb1c 632{
633 /* In case the MD file explicitly references the frame pointer, have
634 all such references point to the same frame pointer. This is
635 used during frame pointer elimination to distinguish the explicit
636 references to these registers from pseudos that happened to be
637 assigned to them.
638
639 If we have eliminated the frame pointer or arg pointer, we will
640 be using it as a normal register, for example as a spill
641 register. In such cases, we might be accessing it in a mode that
642 is not Pmode and therefore cannot use the pre-allocated rtx.
643
644 Also don't do this when we are making new REGs in reload, since
645 we don't want to get confused with the real pointers. */
646
c6a6cdaa 647 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
3ad7bb1c 648 {
71801afc 649 if (regno == FRAME_POINTER_REGNUM
650 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 651 return frame_pointer_rtx;
5ae82d58 652#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
71801afc 653 if (regno == HARD_FRAME_POINTER_REGNUM
654 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 655 return hard_frame_pointer_rtx;
656#endif
5ae82d58 657#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
e8b59353 658 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 659 return arg_pointer_rtx;
660#endif
661#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 662 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 663 return return_address_pointer_rtx;
664#endif
3473aefe 665 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
8d43ad05 666 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
6ea47475 667 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
d4c5e26d 668 return pic_offset_table_rtx;
e8b59353 669 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 670 return stack_pointer_rtx;
671 }
672
32b53d83 673#if 0
90295bd2 674 /* If the per-function register table has been set up, try to re-use
32b53d83 675 an existing entry in that table to avoid useless generation of RTL.
676
677 This code is disabled for now until we can fix the various backends
678 which depend on having non-shared hard registers in some cases. Long
679 term we want to re-enable this code as it can significantly cut down
71801afc 680 on the amount of useless RTL that gets generated.
681
682 We'll also need to fix some code that runs after reload that wants to
683 set ORIGINAL_REGNO. */
684
90295bd2 685 if (cfun
686 && cfun->emit
687 && regno_reg_rtx
688 && regno < FIRST_PSEUDO_REGISTER
689 && reg_raw_mode[regno] == mode)
690 return regno_reg_rtx[regno];
32b53d83 691#endif
90295bd2 692
22cf44bc 693 return gen_raw_REG (mode, regno);
3ad7bb1c 694}
695
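/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): outside of reload/LRA, while the frame
   pointer is still needed, a Pmode reference to it comes back as the
   single pre-allocated rtx, so it can be recognized by pointer
   equality.  */
static bool
example_shared_frame_pointer (void)
{
  return gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx;
}
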
b5ba9f3a 696rtx
35cb5232 697gen_rtx_MEM (enum machine_mode mode, rtx addr)
b5ba9f3a 698{
699 rtx rt = gen_rtx_raw_MEM (mode, addr);
700
701 /* This field is not cleared by the mere allocation of the rtx, so
702 we clear it here. */
c6259b83 703 MEM_ATTRS (rt) = 0;
b5ba9f3a 704
705 return rt;
706}
701e46d0 707
e265a6da 708/* Generate a memory referring to non-trapping constant memory. */
709
710rtx
711gen_const_mem (enum machine_mode mode, rtx addr)
712{
713 rtx mem = gen_rtx_MEM (mode, addr);
714 MEM_READONLY_P (mem) = 1;
715 MEM_NOTRAP_P (mem) = 1;
716 return mem;
717}
718
00060fc2 719/* Generate a MEM referring to fixed portions of the frame, e.g., register
720 save areas. */
721
722rtx
723gen_frame_mem (enum machine_mode mode, rtx addr)
724{
725 rtx mem = gen_rtx_MEM (mode, addr);
726 MEM_NOTRAP_P (mem) = 1;
727 set_mem_alias_set (mem, get_frame_alias_set ());
728 return mem;
729}
730
731/* Generate a MEM referring to a temporary use of the stack, not part
732 of the fixed stack frame. For example, something which is pushed
733 by a target splitter. */
734rtx
735gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
736{
737 rtx mem = gen_rtx_MEM (mode, addr);
738 MEM_NOTRAP_P (mem) = 1;
18d50ae6 739 if (!cfun->calls_alloca)
00060fc2 740 set_mem_alias_set (mem, get_frame_alias_set ());
741 return mem;
742}
743
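/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): a word-sized register-save slot at a
   fixed offset from the frame pointer, built with gen_frame_mem so it
   is marked non-trapping and placed in the frame alias set.  */
static rtx
example_save_slot (HOST_WIDE_INT offset)
{
  return gen_frame_mem (word_mode,
                        plus_constant (Pmode, frame_pointer_rtx, offset));
}
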
2166bbaa 744/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
745 this construct would be valid, and false otherwise. */
746
747bool
748validate_subreg (enum machine_mode omode, enum machine_mode imode,
7ecb5bb2 749 const_rtx reg, unsigned int offset)
701e46d0 750{
2166bbaa 751 unsigned int isize = GET_MODE_SIZE (imode);
752 unsigned int osize = GET_MODE_SIZE (omode);
753
754 /* All subregs must be aligned. */
755 if (offset % osize != 0)
756 return false;
757
758 /* The subreg offset cannot be outside the inner object. */
759 if (offset >= isize)
760 return false;
761
762 /* ??? This should not be here. Temporarily continue to allow word_mode
763 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
764 Generally, backends are doing something sketchy but it'll take time to
765 fix them all. */
766 if (omode == word_mode)
767 ;
768 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
769 is the culprit here, and not the backends. */
770 else if (osize >= UNITS_PER_WORD && isize >= osize)
771 ;
772 /* Allow component subregs of complex and vector. Though given the below
773 extraction rules, it's not always clear what that means. */
774 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
775 && GET_MODE_INNER (imode) == omode)
776 ;
777 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
778 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
779 represent this. It's questionable if this ought to be represented at
780 all -- why can't this all be hidden in post-reload splitters that make
781 arbitrary mode changes to the registers themselves? */
782 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
783 ;
784 /* Subregs involving floating point modes are not allowed to
785 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
786 (subreg:SI (reg:DF) 0) isn't. */
787 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
788 {
c6a6cdaa 789 if (! (isize == osize
790 /* LRA can use subreg to store a floating point value in
791 an integer mode. Although the floating point and the
792 integer modes need the same number of hard registers,
793 the size of the floating point mode can be less than that of
794 the integer mode. LRA also uses subregs when a register
795 should be used in a different mode in an insn. */
796 || lra_in_progress))
2166bbaa 797 return false;
798 }
701e46d0 799
2166bbaa 800 /* Paradoxical subregs must have offset zero. */
801 if (osize > isize)
802 return offset == 0;
803
804 /* This is a normal subreg. Verify that the offset is representable. */
805
806 /* For hard registers, we already have most of these rules collected in
807 subreg_offset_representable_p. */
808 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
809 {
810 unsigned int regno = REGNO (reg);
811
812#ifdef CANNOT_CHANGE_MODE_CLASS
813 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
814 && GET_MODE_INNER (imode) == omode)
815 ;
816 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
817 return false;
701e46d0 818#endif
2166bbaa 819
820 return subreg_offset_representable_p (regno, imode, offset, omode);
821 }
822
823 /* For pseudo registers, we want most of the same checks. Namely:
824 If the register no larger than a word, the subreg must be lowpart.
825 If the register is larger than a word, the subreg must be the lowpart
826 of a subword. A subreg does *not* perform arbitrary bit extraction.
827 Given that we've already checked mode/offset alignment, we only have
828 to check subword subregs here. */
c6a6cdaa 829 if (osize < UNITS_PER_WORD
830 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
2166bbaa 831 {
832 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
833 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
834 if (offset % UNITS_PER_WORD != low_off)
835 return false;
836 }
837 return true;
838}
839
840rtx
841gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
842{
843 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
2ff23ed0 844 return gen_rtx_raw_SUBREG (mode, reg, offset);
701e46d0 845}
846
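/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): the SImode low half of a DImode pseudo.
   The byte offset must be the lowpart offset for the target's
   endianness (0 on little-endian, 4 on big-endian), which
   subreg_lowpart_offset computes and validate_subreg accepts.  */
static rtx
example_low_half (rtx di_reg)
{
  return gen_rtx_SUBREG (SImode, di_reg,
                         subreg_lowpart_offset (SImode, DImode));
}
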
c6259b83 847/* Generate a SUBREG representing the least-significant part of REG if MODE
848 is smaller than mode of REG, otherwise paradoxical SUBREG. */
849
701e46d0 850rtx
35cb5232 851gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
701e46d0 852{
853 enum machine_mode inmode;
701e46d0 854
855 inmode = GET_MODE (reg);
856 if (inmode == VOIDmode)
857 inmode = mode;
81802af6 858 return gen_rtx_SUBREG (mode, reg,
859 subreg_lowpart_offset (mode, inmode));
701e46d0 860}
e1398578 861
862rtx
863gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
864 enum var_init_status status)
865{
866 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
867 PAT_VAR_LOCATION_STATUS (x) = status;
868 return x;
869}
7014838c 870\f
15bbde2b 871
cf9ac040 872/* Create an rtvec and store within it the RTXen passed as arguments. */
873
15bbde2b 874rtvec
ee582a61 875gen_rtvec (int n, ...)
15bbde2b 876{
cf9ac040 877 int i;
878 rtvec rt_val;
ee582a61 879 va_list p;
15bbde2b 880
ee582a61 881 va_start (p, n);
15bbde2b 882
cf9ac040 883 /* Don't allocate an empty rtvec... */
15bbde2b 884 if (n == 0)
451c8e2f 885 {
886 va_end (p);
887 return NULL_RTVEC;
888 }
15bbde2b 889
cf9ac040 890 rt_val = rtvec_alloc (n);
e5fcd76a 891
15bbde2b 892 for (i = 0; i < n; i++)
cf9ac040 893 rt_val->elem[i] = va_arg (p, rtx);
7ad77798 894
ee582a61 895 va_end (p);
cf9ac040 896 return rt_val;
15bbde2b 897}
898
899rtvec
35cb5232 900gen_rtvec_v (int n, rtx *argp)
15bbde2b 901{
19cb6b50 902 int i;
903 rtvec rt_val;
15bbde2b 904
cf9ac040 905 /* Don't allocate an empty rtvec... */
15bbde2b 906 if (n == 0)
cf9ac040 907 return NULL_RTVEC;
15bbde2b 908
cf9ac040 909 rt_val = rtvec_alloc (n);
15bbde2b 910
911 for (i = 0; i < n; i++)
a4070a91 912 rt_val->elem[i] = *argp++;
15bbde2b 913
914 return rt_val;
915}
916\f
80c70e76 917/* Return the number of bytes between the start of an OUTER_MODE
918 in-memory value and the start of an INNER_MODE in-memory value,
919 given that the former is a lowpart of the latter. It may be a
920 paradoxical lowpart, in which case the offset will be negative
921 on big-endian targets. */
922
923int
924byte_lowpart_offset (enum machine_mode outer_mode,
925 enum machine_mode inner_mode)
926{
927 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
928 return subreg_lowpart_offset (outer_mode, inner_mode);
929 else
930 return -subreg_lowpart_offset (inner_mode, outer_mode);
931}
932\f
15bbde2b 933/* Generate a REG rtx for a new pseudo register of mode MODE.
934 This pseudo is assigned the next sequential register number. */
935
936rtx
35cb5232 937gen_reg_rtx (enum machine_mode mode)
15bbde2b 938{
19cb6b50 939 rtx val;
27a7a23a 940 unsigned int align = GET_MODE_ALIGNMENT (mode);
15bbde2b 941
1b7ff857 942 gcc_assert (can_create_pseudo_p ());
15bbde2b 943
27a7a23a 944 /* If a virtual register with bigger mode alignment is generated,
945 increase stack alignment estimation because it might be spilled
946 to stack later. */
48e1416a 947 if (SUPPORTS_STACK_ALIGNMENT
27a7a23a 948 && crtl->stack_alignment_estimated < align
949 && !crtl->stack_realign_processed)
8645d3e7 950 {
951 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
952 if (crtl->stack_alignment_estimated < min_align)
953 crtl->stack_alignment_estimated = min_align;
954 }
27a7a23a 955
316bc009 956 if (generating_concat_p
957 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
958 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 959 {
960 /* For complex modes, don't make a single pseudo.
961 Instead, make a CONCAT of two pseudos.
962 This allows noncontiguous allocation of the real and imaginary parts,
963 which makes much better code. Besides, allocating DCmode
964 pseudos overstrains reload on some machines like the 386. */
965 rtx realpart, imagpart;
e9e12845 966 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 967
968 realpart = gen_reg_rtx (partmode);
969 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 970 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 971 }
972
b4c6ce9b 973 /* Do not call gen_reg_rtx with uninitialized crtl. */
974 gcc_assert (crtl->emit.regno_pointer_align_length);
975
ca74b940 976 /* Make sure regno_pointer_align and regno_reg_rtx are large
fcdc122e 977 enough to have an element for this pseudo reg number. */
15bbde2b 978
fd6ffb7c 979 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
15bbde2b 980 {
fd6ffb7c 981 int old_size = crtl->emit.regno_pointer_align_length;
9ce37fa7 982 char *tmp;
fcdc122e 983 rtx *new1;
fcdc122e 984
9ce37fa7 985 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
986 memset (tmp + old_size, 0, old_size);
987 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
0a893c29 988
2457c754 989 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
0a893c29 990 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 991 regno_reg_rtx = new1;
992
fd6ffb7c 993 crtl->emit.regno_pointer_align_length = old_size * 2;
15bbde2b 994 }
995
22cf44bc 996 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 997 regno_reg_rtx[reg_rtx_no++] = val;
998 return val;
999}
1000
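/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): while generating_concat_p is set, as it
   is during expansion, a complex-mode pseudo is not one register but a
   CONCAT of two part-mode pseudos, per the complex case above.  */
static bool
example_complex_pseudo (void)
{
  rtx x = gen_reg_rtx (DCmode);
  return GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == DFmode;
}
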
ea239197 1001/* Return TRUE if REG represents a PARM_DECL, FALSE otherwise. */
1002
1003bool
1004reg_is_parm_p (rtx reg)
1005{
1006 tree decl;
1007
1008 gcc_assert (REG_P (reg));
1009 decl = REG_EXPR (reg);
1010 return (decl && TREE_CODE (decl) == PARM_DECL);
1011}
1012
80c70e76 1013/* Update NEW with the same attributes as REG, but with OFFSET added
1014 to the REG_OFFSET. */
ca74b940 1015
1a6a0f2a 1016static void
9ce37fa7 1017update_reg_offset (rtx new_rtx, rtx reg, int offset)
ca74b940 1018{
9ce37fa7 1019 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
35cb5232 1020 REG_OFFSET (reg) + offset);
1a6a0f2a 1021}
1022
80c70e76 1023/* Generate a register with same attributes as REG, but with OFFSET
1024 added to the REG_OFFSET. */
1a6a0f2a 1025
1026rtx
1027gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1028 int offset)
1029{
9ce37fa7 1030 rtx new_rtx = gen_rtx_REG (mode, regno);
1a6a0f2a 1031
9ce37fa7 1032 update_reg_offset (new_rtx, reg, offset);
1033 return new_rtx;
1a6a0f2a 1034}
1035
1036/* Generate a new pseudo-register with the same attributes as REG, but
80c70e76 1037 with OFFSET added to the REG_OFFSET. */
1a6a0f2a 1038
1039rtx
1040gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1041{
9ce37fa7 1042 rtx new_rtx = gen_reg_rtx (mode);
1a6a0f2a 1043
9ce37fa7 1044 update_reg_offset (new_rtx, reg, offset);
1045 return new_rtx;
ca74b940 1046}
1047
80c70e76 1048/* Adjust REG in-place so that it has mode MODE. It is assumed that the
1049 new register is a (possibly paradoxical) lowpart of the old one. */
ca74b940 1050
1051void
80c70e76 1052adjust_reg_mode (rtx reg, enum machine_mode mode)
ca74b940 1053{
80c70e76 1054 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1055 PUT_MODE (reg, mode);
1056}
1057
1058/* Copy REG's attributes from X, if X has any attributes. If REG and X
1059 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1060
1061void
1062set_reg_attrs_from_value (rtx reg, rtx x)
1063{
1064 int offset;
e623c80a 1065 bool can_be_reg_pointer = true;
1066
1067 /* Don't call mark_reg_pointer for incompatible pointer sign
1068 extension. */
1069 while (GET_CODE (x) == SIGN_EXTEND
1070 || GET_CODE (x) == ZERO_EXTEND
1071 || GET_CODE (x) == TRUNCATE
1072 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1073 {
1074#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1075 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1076 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1077 can_be_reg_pointer = false;
1078#endif
1079 x = XEXP (x, 0);
1080 }
80c70e76 1081
ac56145e 1082 /* Hard registers can be reused for multiple purposes within the same
1083 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1084 on them is wrong. */
1085 if (HARD_REGISTER_P (reg))
1086 return;
1087
80c70e76 1088 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
ae12ddda 1089 if (MEM_P (x))
1090 {
da443c27 1091 if (MEM_OFFSET_KNOWN_P (x))
1092 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1093 MEM_OFFSET (x) + offset);
e623c80a 1094 if (can_be_reg_pointer && MEM_POINTER (x))
40b93dba 1095 mark_reg_pointer (reg, 0);
ae12ddda 1096 }
1097 else if (REG_P (x))
1098 {
1099 if (REG_ATTRS (x))
1100 update_reg_offset (reg, x, offset);
e623c80a 1101 if (can_be_reg_pointer && REG_POINTER (x))
ae12ddda 1102 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1103 }
1104}
1105
1106/* Generate a REG rtx for a new pseudo register, copying the mode
1107 and attributes from X. */
1108
1109rtx
1110gen_reg_rtx_and_attrs (rtx x)
1111{
1112 rtx reg = gen_reg_rtx (GET_MODE (x));
1113 set_reg_attrs_from_value (reg, x);
1114 return reg;
ca74b940 1115}
1116
263c416c 1117/* Set the register attributes for registers contained in PARM_RTX.
1118 Use needed values from memory attributes of MEM. */
1119
1120void
35cb5232 1121set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
263c416c 1122{
8ad4c111 1123 if (REG_P (parm_rtx))
80c70e76 1124 set_reg_attrs_from_value (parm_rtx, mem);
263c416c 1125 else if (GET_CODE (parm_rtx) == PARALLEL)
1126 {
1127 /* Check for a NULL entry in the first slot, used to indicate that the
1128 parameter goes both on the stack and in registers. */
1129 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1130 for (; i < XVECLEN (parm_rtx, 0); i++)
1131 {
1132 rtx x = XVECEXP (parm_rtx, 0, i);
8ad4c111 1133 if (REG_P (XEXP (x, 0)))
263c416c 1134 REG_ATTRS (XEXP (x, 0))
1135 = get_reg_attrs (MEM_EXPR (mem),
1136 INTVAL (XEXP (x, 1)));
1137 }
1138 }
1139}
1140
80c70e76 1141/* Set the REG_ATTRS for registers in value X, given that X represents
1142 decl T. */
ca74b940 1143
a8dd994c 1144void
80c70e76 1145set_reg_attrs_for_decl_rtl (tree t, rtx x)
1146{
1147 if (GET_CODE (x) == SUBREG)
ebfc27f5 1148 {
80c70e76 1149 gcc_assert (subreg_lowpart_p (x));
1150 x = SUBREG_REG (x);
ebfc27f5 1151 }
8ad4c111 1152 if (REG_P (x))
80c70e76 1153 REG_ATTRS (x)
1154 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
3ff99faa 1155 DECL_MODE (t)));
ca74b940 1156 if (GET_CODE (x) == CONCAT)
1157 {
1158 if (REG_P (XEXP (x, 0)))
1159 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1160 if (REG_P (XEXP (x, 1)))
1161 REG_ATTRS (XEXP (x, 1))
1162 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1163 }
1164 if (GET_CODE (x) == PARALLEL)
1165 {
85d25060 1166 int i, start;
1167
1168 /* Check for a NULL entry, used to indicate that the parameter goes
1169 both on the stack and in registers. */
1170 if (XEXP (XVECEXP (x, 0, 0), 0))
1171 start = 0;
1172 else
1173 start = 1;
1174
1175 for (i = start; i < XVECLEN (x, 0); i++)
ca74b940 1176 {
1177 rtx y = XVECEXP (x, 0, i);
1178 if (REG_P (XEXP (y, 0)))
1179 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1180 }
1181 }
1182}
1183
80c70e76 1184/* Assign the RTX X to declaration T. */
1185
1186void
1187set_decl_rtl (tree t, rtx x)
1188{
1189 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1190 if (x)
1191 set_reg_attrs_for_decl_rtl (t, x);
1192}
1193
d91cf567 1194/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1195 if the ABI requires the parameter to be passed by reference. */
80c70e76 1196
1197void
d91cf567 1198set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
80c70e76 1199{
1200 DECL_INCOMING_RTL (t) = x;
d91cf567 1201 if (x && !by_reference_p)
80c70e76 1202 set_reg_attrs_for_decl_rtl (t, x);
1203}
1204
de8ecfb5 1205/* Identify REG (which may be a CONCAT) as a user register. */
1206
1207void
35cb5232 1208mark_user_reg (rtx reg)
de8ecfb5 1209{
1210 if (GET_CODE (reg) == CONCAT)
1211 {
1212 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1213 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1214 }
de8ecfb5 1215 else
611234b4 1216 {
1217 gcc_assert (REG_P (reg));
1218 REG_USERVAR_P (reg) = 1;
1219 }
de8ecfb5 1220}
1221
d4c332ff 1222/* Identify REG as a probable pointer register and show its alignment
1223 as ALIGN, if nonzero. */
15bbde2b 1224
1225void
35cb5232 1226mark_reg_pointer (rtx reg, int align)
15bbde2b 1227{
e61a0a7f 1228 if (! REG_POINTER (reg))
612409a6 1229 {
e61a0a7f 1230 REG_POINTER (reg) = 1;
d4c332ff 1231
612409a6 1232 if (align)
1233 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1234 }
1235 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
8b332087 1236 /* We can no longer be sure just how aligned this pointer is. */
d4c332ff 1237 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 1238}
1239
1240/* Return 1 plus largest pseudo reg number used in the current function. */
1241
1242int
35cb5232 1243max_reg_num (void)
15bbde2b 1244{
1245 return reg_rtx_no;
1246}
1247
1248/* Return 1 + the largest label number used so far in the current function. */
1249
1250int
35cb5232 1251max_label_num (void)
15bbde2b 1252{
15bbde2b 1253 return label_num;
1254}
1255
1256/* Return first label number used in this function (if any were used). */
1257
1258int
35cb5232 1259get_first_label_num (void)
15bbde2b 1260{
1261 return first_label_num;
1262}
4ee9c684 1263
1264/* If the rtx for label was created during the expansion of a nested
1265 function, then first_label_num won't include this label number.
f0b5f617 1266 Fix this now so that array indices work later. */
4ee9c684 1267
1268void
1269maybe_set_first_label_num (rtx x)
1270{
1271 if (CODE_LABEL_NUMBER (x) < first_label_num)
1272 first_label_num = CODE_LABEL_NUMBER (x);
1273}
15bbde2b 1274\f
1275/* Return a value representing some low-order bits of X, where the number
1276 of low-order bits is given by MODE. Note that no conversion is done
d823ba47 1277 between floating-point and fixed-point values, rather, the bit
15bbde2b 1278 representation is returned.
1279
1280 This function handles the cases in common between gen_lowpart, below,
1281 and two variants in cse.c and combine.c. These are the cases that can
1282 be safely handled at all points in the compilation.
1283
1284 If this is not a case we can handle, return 0. */
1285
1286rtx
35cb5232 1287gen_lowpart_common (enum machine_mode mode, rtx x)
15bbde2b 1288{
701e46d0 1289 int msize = GET_MODE_SIZE (mode);
791172c5 1290 int xsize;
701e46d0 1291 int offset = 0;
791172c5 1292 enum machine_mode innermode;
1293
1294 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1295 so we have to make one up. Yuk. */
1296 innermode = GET_MODE (x);
971ba038 1297 if (CONST_INT_P (x)
6c799a83 1298 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
791172c5 1299 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1300 else if (innermode == VOIDmode)
24cd46a7 1301 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
48e1416a 1302
791172c5 1303 xsize = GET_MODE_SIZE (innermode);
1304
611234b4 1305 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
15bbde2b 1306
791172c5 1307 if (innermode == mode)
15bbde2b 1308 return x;
1309
1310 /* MODE must occupy no more words than the mode of X. */
791172c5 1311 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1312 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
15bbde2b 1313 return 0;
1314
9abe1e73 1315 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
cee7491d 1316 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
9abe1e73 1317 return 0;
1318
791172c5 1319 offset = subreg_lowpart_offset (mode, innermode);
15bbde2b 1320
1321 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
64115b39 1322 && (GET_MODE_CLASS (mode) == MODE_INT
1323 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
15bbde2b 1324 {
1325 /* If we are getting the low-order part of something that has been
1326 sign- or zero-extended, we can either just use the object being
1327 extended or make a narrower extension. If we want an even smaller
1328 piece than the size of the object being extended, call ourselves
1329 recursively.
1330
1331 This case is used mostly by combine and cse. */
1332
1333 if (GET_MODE (XEXP (x, 0)) == mode)
1334 return XEXP (x, 0);
791172c5 1335 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
15bbde2b 1336 return gen_lowpart_common (mode, XEXP (x, 0));
791172c5 1337 else if (msize < xsize)
3ad7bb1c 1338 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
15bbde2b 1339 }
8ad4c111 1340 else if (GET_CODE (x) == SUBREG || REG_P (x)
791172c5 1341 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
efa08fc2 1342 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
791172c5 1343 return simplify_gen_subreg (mode, x, innermode, offset);
4a307dd5 1344
15bbde2b 1345 /* Otherwise, we can't do this. */
1346 return 0;
1347}
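
/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): by the extension case above, the SImode
   lowpart of (sign_extend:DI (reg:SI R)) is simply (reg:SI R) itself.  */
static rtx
example_lowpart_of_extend (rtx si_reg)
{
  rtx ext = gen_rtx_SIGN_EXTEND (DImode, si_reg);
  return gen_lowpart_common (SImode, ext);  /* Returns si_reg.  */
}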
1348\f
d56d0ca2 1349rtx
35cb5232 1350gen_highpart (enum machine_mode mode, rtx x)
d56d0ca2 1351{
701e46d0 1352 unsigned int msize = GET_MODE_SIZE (mode);
81802af6 1353 rtx result;
701e46d0 1354
d56d0ca2 1355 /* This case loses if X is a subreg. To catch bugs early,
1356 complain if an invalid MODE is used even in other cases. */
611234b4 1357 gcc_assert (msize <= UNITS_PER_WORD
1358 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
701e46d0 1359
81802af6 1360 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1361 subreg_highpart_offset (mode, GET_MODE (x)));
611234b4 1362 gcc_assert (result);
48e1416a 1363
a8c36ab2 1364 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1365 the target if we have a MEM. gen_highpart must return a valid operand,
1366 emitting code if necessary to do so. */
611234b4 1367 if (MEM_P (result))
1368 {
1369 result = validize_mem (result);
1370 gcc_assert (result);
1371 }
48e1416a 1372
81802af6 1373 return result;
1374}
704fcf2b 1375
29d56731 1376/* Like gen_highpart, but accept the mode of the EXP operand, in case
704fcf2b 1377 EXP can be a VOIDmode constant. */
1378rtx
35cb5232 1379gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
704fcf2b 1380{
1381 if (GET_MODE (exp) != VOIDmode)
1382 {
611234b4 1383 gcc_assert (GET_MODE (exp) == innermode);
704fcf2b 1384 return gen_highpart (outermode, exp);
1385 }
1386 return simplify_gen_subreg (outermode, exp, innermode,
1387 subreg_highpart_offset (outermode, innermode));
1388}
d4c5e26d 1389
80c70e76 1390/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
10ef59ac 1391
81802af6 1392unsigned int
35cb5232 1393subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
81802af6 1394{
1395 unsigned int offset = 0;
1396 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1397
81802af6 1398 if (difference > 0)
d56d0ca2 1399 {
81802af6 1400 if (WORDS_BIG_ENDIAN)
1401 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1402 if (BYTES_BIG_ENDIAN)
1403 offset += difference % UNITS_PER_WORD;
d56d0ca2 1404 }
701e46d0 1405
81802af6 1406 return offset;
d56d0ca2 1407}
64ab453f 1408
81802af6 1409/* Return offset in bytes to get OUTERMODE high part
1410 of the value in mode INNERMODE stored in memory in target format. */
1411unsigned int
35cb5232 1412subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
64ab453f 1413{
1414 unsigned int offset = 0;
1415 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1416
611234b4 1417 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
81802af6 1418
64ab453f 1419 if (difference > 0)
1420 {
81802af6 1421 if (! WORDS_BIG_ENDIAN)
64ab453f 1422 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1423 if (! BYTES_BIG_ENDIAN)
64ab453f 1424 offset += difference % UNITS_PER_WORD;
1425 }
1426
81802af6 1427 return offset;
64ab453f 1428}
d56d0ca2 1429
15bbde2b 1430/* Return 1 iff X, assumed to be a SUBREG,
1431 refers to the least significant part of its containing reg.
1432 If X is not a SUBREG, always return 1 (it is its own low part!). */
1433
1434int
b7bf20db 1435subreg_lowpart_p (const_rtx x)
15bbde2b 1436{
1437 if (GET_CODE (x) != SUBREG)
1438 return 1;
7e14c1bf 1439 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1440 return 0;
15bbde2b 1441
81802af6 1442 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1443 == SUBREG_BYTE (x));
15bbde2b 1444}
b537bfdb 1445
1446/* Return true if X is a paradoxical subreg, false otherwise. */
1447bool
1448paradoxical_subreg_p (const_rtx x)
1449{
1450 if (GET_CODE (x) != SUBREG)
1451 return false;
1452 return (GET_MODE_PRECISION (GET_MODE (x))
1453 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1454}
15bbde2b 1455\f
701e46d0 1456/* Return subword OFFSET of operand OP.
1457 The word number, OFFSET, is interpreted as the word number starting
1458 at the low-order address. OFFSET 0 is the low-order word if not
1459 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1460
1461 If we cannot extract the required word, we return zero. Otherwise,
1462 an rtx corresponding to the requested word will be returned.
1463
1464 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1465 reload has completed, a valid address will always be returned. After
1466 reload, if a valid address cannot be returned, we return zero.
1467
1468 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1469 it is the responsibility of the caller.
1470
1471 MODE is the mode of OP in case it is a CONST_INT.
1472
1473 ??? This is still rather broken for some cases. The problem for the
1474 moment is that all callers of this thing provide no 'goal mode' to
1475 tell us to work with. This exists because all callers were written
84e81e84 1476 in a word-based SUBREG world.
1477 Most uses of this function can nowadays be replaced by
1478 simplify_subreg.
1479 */
701e46d0 1480
1481rtx
35cb5232 1482operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
701e46d0 1483{
1484 if (mode == VOIDmode)
1485 mode = GET_MODE (op);
1486
611234b4 1487 gcc_assert (mode != VOIDmode);
701e46d0 1488
6312a35e 1489 /* If OP is narrower than a word, fail. */
701e46d0 1490 if (mode != BLKmode
1491 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1492 return 0;
1493
6312a35e 1494 /* If we want a word outside OP, return a zero constant (const0_rtx). */
701e46d0 1495 if (mode != BLKmode
1496 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1497 return const0_rtx;
1498
701e46d0 1499 /* Form a new MEM at the requested address. */
e16ceb8e 1500 if (MEM_P (op))
701e46d0 1501 {
9ce37fa7 1502 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
701e46d0 1503
e4e86ec5 1504 if (! validate_address)
9ce37fa7 1505 return new_rtx;
e4e86ec5 1506
1507 else if (reload_completed)
701e46d0 1508 {
bd1a81f7 1509 if (! strict_memory_address_addr_space_p (word_mode,
1510 XEXP (new_rtx, 0),
1511 MEM_ADDR_SPACE (op)))
e4e86ec5 1512 return 0;
701e46d0 1513 }
e4e86ec5 1514 else
9ce37fa7 1515 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
701e46d0 1516 }
1517
84e81e84 1518 /* Rest can be handled by simplify_subreg. */
1519 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
701e46d0 1520}
1521
89f18f73 1522/* Similar to `operand_subword', but never return 0. If we can't
1523 extract the required subword, put OP into a register and try again.
1524 The second attempt must succeed. We always validate the address in
1525 this case.
15bbde2b 1526
1527 MODE is the mode of OP, in case it is CONST_INT. */
1528
1529rtx
35cb5232 1530operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
15bbde2b 1531{
701e46d0 1532 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1533
1534 if (result)
1535 return result;
1536
1537 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1538 {
1539 /* If this is a register which cannot be accessed by words, copy it
1540 to a pseudo register. */
8ad4c111 1541 if (REG_P (op))
ac825d29 1542 op = copy_to_reg (op);
1543 else
1544 op = force_reg (mode, op);
1545 }
15bbde2b 1546
701e46d0 1547 result = operand_subword (op, offset, 1, mode);
611234b4 1548 gcc_assert (result);
15bbde2b 1549
1550 return result;
1551}
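
/* Illustrative sketch (editorial addition, not part of emit-rtl.c; the
   helper name is hypothetical): splitting a DImode operand into its two
   words on a 32-bit target.  Note that word 0 is the low-order word
   only when !WORDS_BIG_ENDIAN, as described above.  */
static void
example_split_di (rtx op, rtx *word0, rtx *word1)
{
  *word0 = operand_subword_force (op, 0, DImode);
  *word1 = operand_subword_force (op, 1, DImode);
}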
1552\f
b3ff8d90 1553/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1554 equal, and 0 otherwise. */
1555
1556int
52d07779 1557mem_expr_equal_p (const_tree expr1, const_tree expr2)
b3ff8d90 1558{
1559 if (expr1 == expr2)
1560 return 1;
1561
1562 if (! expr1 || ! expr2)
1563 return 0;
1564
1565 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1566 return 0;
1567
3a443843 1568 return operand_equal_p (expr1, expr2, 0);
b3ff8d90 1569}
1570
ad0a178f 1571/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1572 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1573 -1 if not known. */
1574
1575int
7cfdc2f0 1576get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1577{
1578 tree expr;
1579 unsigned HOST_WIDE_INT offset;
1580
1581 /* This function can't use
da443c27 1582 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
98ab9e8f 1583 || (MAX (MEM_ALIGN (mem),
957d0361 1584 MAX (align, get_object_alignment (MEM_EXPR (mem))))
ad0a178f 1585 < align))
1586 return -1;
1587 else
da443c27 1588 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
ad0a178f 1589 for two reasons:
1590 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1591 for <variable>. get_inner_reference doesn't handle it and
1592 even if it did, the alignment in that case needs to be determined
1593 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1594 - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1595 isn't sufficiently aligned, the object it is in might be. */
1596 gcc_assert (MEM_P (mem));
1597 expr = MEM_EXPR (mem);
da443c27 1598 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
ad0a178f 1599 return -1;
1600
da443c27 1601 offset = MEM_OFFSET (mem);
ad0a178f 1602 if (DECL_P (expr))
1603 {
1604 if (DECL_ALIGN (expr) < align)
1605 return -1;
1606 }
1607 else if (INDIRECT_REF_P (expr))
1608 {
1609 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1610 return -1;
1611 }
1612 else if (TREE_CODE (expr) == COMPONENT_REF)
1613 {
1614 while (1)
1615 {
1616 tree inner = TREE_OPERAND (expr, 0);
1617 tree field = TREE_OPERAND (expr, 1);
1618 tree byte_offset = component_ref_field_offset (expr);
1619 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1620
1621 if (!byte_offset
e913b5cd 1622 || !tree_fits_uhwi_p (byte_offset)
1623 || !tree_fits_uhwi_p (bit_offset))
ad0a178f 1624 return -1;
1625
e913b5cd 1626 offset += tree_to_uhwi (byte_offset);
1627 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
ad0a178f 1628
1629 if (inner == NULL_TREE)
1630 {
1631 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1632 < (unsigned int) align)
1633 return -1;
1634 break;
1635 }
1636 else if (DECL_P (inner))
1637 {
1638 if (DECL_ALIGN (inner) < align)
1639 return -1;
1640 break;
1641 }
1642 else if (TREE_CODE (inner) != COMPONENT_REF)
1643 return -1;
1644 expr = inner;
1645 }
1646 }
1647 else
1648 return -1;
1649
1650 return offset & ((align / BITS_PER_UNIT) - 1);
1651}
1652
310b57a1 1653/* Given REF (a MEM) and T, either the type of REF or the expression
c6259b83 1654 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1655 if we are making a new object of this type. BITPOS is nonzero if
1656 there is an offset outstanding on T that will be applied later. */
c6259b83 1657
1658void
35cb5232 1659set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1660 HOST_WIDE_INT bitpos)
c6259b83 1661{
6f717f77 1662 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1663 tree type;
d72886b5 1664 struct mem_attrs attrs, *defattrs, *refattrs;
3f06bd1b 1665 addr_space_t as;
c6259b83 1666
1667 /* It can happen that type_for_mode was given a mode for which there
 1668 is no language-level type, in which case it returns NULL, which
1669 we can see here. */
1670 if (t == NULL_TREE)
1671 return;
1672
1673 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1674 if (type == error_mark_node)
1675 return;
c6259b83 1676
c6259b83 1677 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1678 wrong answer, as it assumes that DECL_RTL already has the right alias
1679 info. Callers should not set DECL_RTL until after the call to
1680 set_mem_attributes. */
611234b4 1681 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1682
d72886b5 1683 memset (&attrs, 0, sizeof (attrs));
1684
96216d37 1685 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1686 front-end routine) and use it. */
d72886b5 1687 attrs.alias = get_alias_set (t);
c6259b83 1688
fbc6244b 1689 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
8d350e69 1690 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1691
d8dccfe9 1692 /* Default values from pre-existing memory attributes if present. */
d72886b5 1693 refattrs = MEM_ATTRS (ref);
1694 if (refattrs)
d8dccfe9 1695 {
1696 /* ??? Can this ever happen? Calling this routine on a MEM that
1697 already carries memory attributes should probably be invalid. */
d72886b5 1698 attrs.expr = refattrs->expr;
6d58bcba 1699 attrs.offset_known_p = refattrs->offset_known_p;
d72886b5 1700 attrs.offset = refattrs->offset;
6d58bcba 1701 attrs.size_known_p = refattrs->size_known_p;
d72886b5 1702 attrs.size = refattrs->size;
1703 attrs.align = refattrs->align;
d8dccfe9 1704 }
1705
1706 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1707 else
d8dccfe9 1708 {
d72886b5 1709 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1710 gcc_assert (!defattrs->expr);
6d58bcba 1711 gcc_assert (!defattrs->offset_known_p);
d72886b5 1712
d8dccfe9 1713 /* Respect mode size. */
6d58bcba 1714 attrs.size_known_p = defattrs->size_known_p;
d72886b5 1715 attrs.size = defattrs->size;
d8dccfe9 1716 /* ??? Is this really necessary? We probably should always get
1717 the size from the type below. */
1718
1719 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1720 if T is an object, always compute the object alignment below. */
d72886b5 1721 if (TYPE_P (t))
1722 attrs.align = defattrs->align;
1723 else
1724 attrs.align = BITS_PER_UNIT;
d8dccfe9 1725 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1726 e.g. if the type carries an alignment attribute. Should we be
1727 able to simply always use TYPE_ALIGN? */
1728 }
1729
a9d9ab08 1730 /* We can set the alignment from the type if we are making an object,
1731 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
679e0056 1732 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
d72886b5 1733 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1734
96216d37 1735 /* If the size is known, we can set that. */
50ba3acc 1736 tree new_size = TYPE_SIZE_UNIT (type);
96216d37 1737
9eec20bf 1738 /* The address-space is that of the type. */
1739 as = TYPE_ADDR_SPACE (type);
1740
579bccf9 1741 /* If T is not a type, we may be able to deduce some more information about
1742 the expression. */
1743 if (! TYPE_P (t))
2a631e19 1744 {
ae2dd339 1745 tree base;
b04fab2a 1746
2a631e19 1747 if (TREE_THIS_VOLATILE (t))
1748 MEM_VOLATILE_P (ref) = 1;
c6259b83 1749
3c00f11c 1750 /* Now remove any conversions: they don't change what the underlying
1751 object is. Likewise for SAVE_EXPR. */
72dd6141 1752 while (CONVERT_EXPR_P (t)
3c00f11c 1753 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1754 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1755 t = TREE_OPERAND (t, 0);
1756
73eb0a09 1757 /* Note whether this expression can trap. */
1758 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1759
1760 base = get_base_address (t);
3f06bd1b 1761 if (base)
1762 {
1763 if (DECL_P (base)
1764 && TREE_READONLY (base)
1765 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1766 && !TREE_THIS_VOLATILE (base))
1767 MEM_READONLY_P (ref) = 1;
1768
1769 /* Mark static const strings readonly as well. */
1770 if (TREE_CODE (base) == STRING_CST
1771 && TREE_READONLY (base)
1772 && TREE_STATIC (base))
1773 MEM_READONLY_P (ref) = 1;
1774
9eec20bf 1775 /* Address-space information is on the base object. */
3f06bd1b 1776 if (TREE_CODE (base) == MEM_REF
1777 || TREE_CODE (base) == TARGET_MEM_REF)
1778 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1779 0))));
1780 else
1781 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1782 }
cab98a0d 1783
2b02580f 1784 /* If this expression uses its parent's alias set, mark it such
1785 that we won't change it. */
d400f5e1 1786 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
5cc193e7 1787 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1788
2a631e19 1789 /* If this is a decl, set the attributes of the MEM from it. */
1790 if (DECL_P (t))
1791 {
d72886b5 1792 attrs.expr = t;
6d58bcba 1793 attrs.offset_known_p = true;
1794 attrs.offset = 0;
6f717f77 1795 apply_bitpos = bitpos;
50ba3acc 1796 new_size = DECL_SIZE_UNIT (t);
2a631e19 1797 }
1798
9eec20bf 1799 /* ??? If we end up with a constant here do record a MEM_EXPR. */
ce45a448 1800 else if (CONSTANT_CLASS_P (t))
9eec20bf 1801 ;
b10dbbca 1802
50ba3acc 1803 /* If this is a field reference, record it. */
1804 else if (TREE_CODE (t) == COMPONENT_REF)
b10dbbca 1805 {
d72886b5 1806 attrs.expr = t;
6d58bcba 1807 attrs.offset_known_p = true;
1808 attrs.offset = 0;
6f717f77 1809 apply_bitpos = bitpos;
50ba3acc 1810 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1811 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
b10dbbca 1812 }
1813
1814 /* If this is an array reference, look for an outer field reference. */
1815 else if (TREE_CODE (t) == ARRAY_REF)
1816 {
1817 tree off_tree = size_zero_node;
6b039979 1818 /* We can't modify t, because we use it at the end of the
1819 function. */
1820 tree t2 = t;
b10dbbca 1821
1822 do
1823 {
6b039979 1824 tree index = TREE_OPERAND (t2, 1);
6374121b 1825 tree low_bound = array_ref_low_bound (t2);
1826 tree unit_size = array_ref_element_size (t2);
97f8ce30 1827
1828 /* We assume all arrays have sizes that are a multiple of a byte.
1829 First subtract the lower bound, if any, in the type of the
6374121b 1830 index, then convert to sizetype and multiply by the size of
1831 the array element. */
1832 if (! integer_zerop (low_bound))
faa43f85 1833 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1834 index, low_bound);
97f8ce30 1835
6374121b 1836 off_tree = size_binop (PLUS_EXPR,
535664e3 1837 size_binop (MULT_EXPR,
1838 fold_convert (sizetype,
1839 index),
6374121b 1840 unit_size),
1841 off_tree);
6b039979 1842 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1843 }
6b039979 1844 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1845
9eec20bf 1846 if (DECL_P (t2)
1847 || TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1848 {
d72886b5 1849 attrs.expr = t2;
6d58bcba 1850 attrs.offset_known_p = false;
e913b5cd 1851 if (tree_fits_uhwi_p (off_tree))
6f717f77 1852 {
6d58bcba 1853 attrs.offset_known_p = true;
e913b5cd 1854 attrs.offset = tree_to_uhwi (off_tree);
6f717f77 1855 apply_bitpos = bitpos;
1856 }
b10dbbca 1857 }
9eec20bf 1858 /* Else do not record a MEM_EXPR. */
2d8fe5d0 1859 }
1860
6d72287b 1861 /* If this is an indirect reference, record it. */
182cf5a9 1862 else if (TREE_CODE (t) == MEM_REF
5d9de213 1863 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1864 {
d72886b5 1865 attrs.expr = t;
6d58bcba 1866 attrs.offset_known_p = true;
1867 attrs.offset = 0;
6d72287b 1868 apply_bitpos = bitpos;
1869 }
1870
9eec20bf 1871 /* Compute the alignment. */
1872 unsigned int obj_align;
1873 unsigned HOST_WIDE_INT obj_bitpos;
1874 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1875 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1876 if (obj_bitpos != 0)
1877 obj_align = (obj_bitpos & -obj_bitpos);
1878 attrs.align = MAX (attrs.align, obj_align);
2a631e19 1879 }
1880
e913b5cd 1881 if (tree_fits_uhwi_p (new_size))
50ba3acc 1882 {
1883 attrs.size_known_p = true;
e913b5cd 1884 attrs.size = tree_to_uhwi (new_size);
50ba3acc 1885 }
1886
e2e205b3 1887 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1888 bit position offset. Similarly, increase the size of the accessed
1889 object to contain the negative offset. */
6f717f77 1890 if (apply_bitpos)
595f1461 1891 {
6d58bcba 1892 gcc_assert (attrs.offset_known_p);
1893 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1894 if (attrs.size_known_p)
1895 attrs.size += apply_bitpos / BITS_PER_UNIT;
595f1461 1896 }
6f717f77 1897
2a631e19 1898 /* Now set the attributes we computed above. */
3f06bd1b 1899 attrs.addrspace = as;
d72886b5 1900 set_mem_attrs (ref, &attrs);
c6259b83 1901}
1902
6f717f77 1903void
35cb5232 1904set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1905{
1906 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1907}
1908
c6259b83 1909/* Set the alias set of MEM to SET. */
1910
1911void
32c2fdea 1912set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1913{
d72886b5 1914 struct mem_attrs attrs;
1915
c6259b83 1916 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 1917 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 1918 attrs = *get_mem_attrs (mem);
1919 attrs.alias = set;
1920 set_mem_attrs (mem, &attrs);
bd1a81f7 1921}
1922
1923/* Set the address space of MEM to ADDRSPACE (target-defined). */
1924
1925void
1926set_mem_addr_space (rtx mem, addr_space_t addrspace)
1927{
d72886b5 1928 struct mem_attrs attrs;
1929
1930 attrs = *get_mem_attrs (mem);
1931 attrs.addrspace = addrspace;
1932 set_mem_attrs (mem, &attrs);
c6259b83 1933}
96216d37 1934
1c4512da 1935/* Set the alignment of MEM to ALIGN bits. */
96216d37 1936
1937void
35cb5232 1938set_mem_align (rtx mem, unsigned int align)
96216d37 1939{
d72886b5 1940 struct mem_attrs attrs;
1941
1942 attrs = *get_mem_attrs (mem);
1943 attrs.align = align;
1944 set_mem_attrs (mem, &attrs);
96216d37 1945}
278fe152 1946
b10dbbca 1947/* Set the expr for MEM to EXPR. */
278fe152 1948
1949void
35cb5232 1950set_mem_expr (rtx mem, tree expr)
278fe152 1951{
d72886b5 1952 struct mem_attrs attrs;
1953
1954 attrs = *get_mem_attrs (mem);
1955 attrs.expr = expr;
1956 set_mem_attrs (mem, &attrs);
278fe152 1957}
b10dbbca 1958
1959/* Set the offset of MEM to OFFSET. */
1960
1961void
da443c27 1962set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 1963{
d72886b5 1964 struct mem_attrs attrs;
1965
1966 attrs = *get_mem_attrs (mem);
6d58bcba 1967 attrs.offset_known_p = true;
1968 attrs.offset = offset;
da443c27 1969 set_mem_attrs (mem, &attrs);
1970}
1971
1972/* Clear the offset of MEM. */
1973
1974void
1975clear_mem_offset (rtx mem)
1976{
1977 struct mem_attrs attrs;
1978
1979 attrs = *get_mem_attrs (mem);
6d58bcba 1980 attrs.offset_known_p = false;
d72886b5 1981 set_mem_attrs (mem, &attrs);
f0500469 1982}
1983
1984/* Set the size of MEM to SIZE. */
1985
1986void
5b2a69fa 1987set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 1988{
d72886b5 1989 struct mem_attrs attrs;
1990
1991 attrs = *get_mem_attrs (mem);
6d58bcba 1992 attrs.size_known_p = true;
1993 attrs.size = size;
5b2a69fa 1994 set_mem_attrs (mem, &attrs);
1995}
1996
1997/* Clear the size of MEM. */
1998
1999void
2000clear_mem_size (rtx mem)
2001{
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
6d58bcba 2005 attrs.size_known_p = false;
d72886b5 2006 set_mem_attrs (mem, &attrs);
b10dbbca 2007}
c6259b83 2008\f
96216d37 2009/* Return a memory reference like MEMREF, but with its mode changed to MODE
2010 and its address changed to ADDR. (VOIDmode means don't change the mode.
2011 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
5cc04e45 2012 returned memory location is required to be valid. INPLACE is true if any
2013 changes can be made directly to MEMREF or false if MEMREF must be treated
2014 as immutable.
2015
2016 The memory attributes are not changed. */
15bbde2b 2017
96216d37 2018static rtx
5cc04e45 2019change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2020 bool inplace)
15bbde2b 2021{
bd1a81f7 2022 addr_space_t as;
9ce37fa7 2023 rtx new_rtx;
15bbde2b 2024
611234b4 2025 gcc_assert (MEM_P (memref));
bd1a81f7 2026 as = MEM_ADDR_SPACE (memref);
15bbde2b 2027 if (mode == VOIDmode)
2028 mode = GET_MODE (memref);
2029 if (addr == 0)
2030 addr = XEXP (memref, 0);
3988ef8b 2031 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 2032 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 2033 return memref;
15bbde2b 2034
73a18f44 2035 /* Don't validate address for LRA. LRA can make the address valid
2036 by itself in most efficient way. */
2037 if (validate && !lra_in_progress)
15bbde2b 2038 {
e4e86ec5 2039 if (reload_in_progress || reload_completed)
bd1a81f7 2040 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 2041 else
bd1a81f7 2042 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 2043 }
d823ba47 2044
e8976cd7 2045 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2046 return memref;
2047
5cc04e45 2048 if (inplace)
2049 {
2050 XEXP (memref, 0) = addr;
2051 return memref;
2052 }
2053
9ce37fa7 2054 new_rtx = gen_rtx_MEM (mode, addr);
2055 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2056 return new_rtx;
15bbde2b 2057}
537ffcfc 2058
96216d37 2059/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2060 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2061
2062rtx
35cb5232 2063change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 2064{
5cc04e45 2065 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
9ce37fa7 2066 enum machine_mode mmode = GET_MODE (new_rtx);
d72886b5 2067 struct mem_attrs attrs, *defattrs;
0ab96142 2068
d72886b5 2069 attrs = *get_mem_attrs (memref);
2070 defattrs = mode_mem_attrs[(int) mmode];
6d58bcba 2071 attrs.expr = NULL_TREE;
2072 attrs.offset_known_p = false;
2073 attrs.size_known_p = defattrs->size_known_p;
d72886b5 2074 attrs.size = defattrs->size;
2075 attrs.align = defattrs->align;
6cc60c4d 2076
d28edf0d 2077 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2078 if (new_rtx == memref)
0ab96142 2079 {
d72886b5 2080 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2081 return new_rtx;
0ab96142 2082
9ce37fa7 2083 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2084 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2085 }
d28edf0d 2086
d72886b5 2087 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2088 return new_rtx;
e513d163 2089}
537ffcfc 2090
96216d37 2091/* Return a memory reference like MEMREF, but with its mode changed
2092 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2093 nonzero, the memory address is forced to be valid.
2d0fd66d 2094 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2095 and the caller is responsible for adjusting MEMREF base register.
2096 If ADJUST_OBJECT is zero, the underlying object associated with the
2097 memory reference is left unchanged and the caller is responsible for
2098 dealing with it. Otherwise, if the new memory reference is outside
226c6baf 2099 the underlying object, even partially, then the object is dropped.
2100 SIZE, if nonzero, is the size of an access in cases where MODE
2101 has no inherent size. */
e4e86ec5 2102
2103rtx
35cb5232 2104adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
226c6baf 2105 int validate, int adjust_address, int adjust_object,
2106 HOST_WIDE_INT size)
e4e86ec5 2107{
fb257ae6 2108 rtx addr = XEXP (memref, 0);
9ce37fa7 2109 rtx new_rtx;
d72886b5 2110 enum machine_mode address_mode;
cfb75cdf 2111 int pbits;
21b8bc7e 2112 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
d72886b5 2113 unsigned HOST_WIDE_INT max_align;
21b8bc7e 2114#ifdef POINTERS_EXTEND_UNSIGNED
2115 enum machine_mode pointer_mode
2116 = targetm.addr_space.pointer_mode (attrs.addrspace);
2117#endif
fb257ae6 2118
4733f549 2119 /* VOIDmode means no mode change for change_address_1. */
2120 if (mode == VOIDmode)
2121 mode = GET_MODE (memref);
2122
226c6baf 2123 /* Take the size of non-BLKmode accesses from the mode. */
2124 defattrs = mode_mem_attrs[(int) mode];
2125 if (defattrs->size_known_p)
2126 size = defattrs->size;
2127
d28edf0d 2128 /* If there are no changes, just return the original memory reference. */
2129 if (mode == GET_MODE (memref) && !offset
226c6baf 2130 && (size == 0 || (attrs.size_known_p && attrs.size == size))
d72886b5 2131 && (!validate || memory_address_addr_space_p (mode, addr,
2132 attrs.addrspace)))
d28edf0d 2133 return memref;
2134
e36c3d58 2135 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2136 This may happen even if offset is nonzero -- consider
e36c3d58 2137 (plus (plus reg reg) const_int) -- so do this always. */
2138 addr = copy_rtx (addr);
2139
cfb75cdf 2140 /* Convert a possibly large offset to a signed value within the
2141 range of the target address space. */
87cf5753 2142 address_mode = get_address_mode (memref);
98155838 2143 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2144 if (HOST_BITS_PER_WIDE_INT > pbits)
2145 {
2146 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2147 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2148 >> shift);
2149 }
2150
2d0fd66d 2151 if (adjust_address)
cd358719 2152 {
2153 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2154 object, we can merge it into the LO_SUM. */
2155 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2156 && offset >= 0
2157 && (unsigned HOST_WIDE_INT) offset
2158 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2159 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
29c05e22 2160 plus_constant (address_mode,
2161 XEXP (addr, 1), offset));
21b8bc7e 2162#ifdef POINTERS_EXTEND_UNSIGNED
2163 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2164 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2165 the fact that pointers are not allowed to overflow. */
2166 else if (POINTERS_EXTEND_UNSIGNED > 0
2167 && GET_CODE (addr) == ZERO_EXTEND
2168 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2169 && trunc_int_for_mode (offset, pointer_mode) == offset)
2170 addr = gen_rtx_ZERO_EXTEND (address_mode,
2171 plus_constant (pointer_mode,
2172 XEXP (addr, 0), offset));
2173#endif
cd358719 2174 else
29c05e22 2175 addr = plus_constant (address_mode, addr, offset);
cd358719 2176 }
fb257ae6 2177
5cc04e45 2178 new_rtx = change_address_1 (memref, mode, addr, validate, false);
96216d37 2179
e077413c 2180 /* If the address is a REG, change_address_1 rightfully returns memref,
2181 but this would destroy memref's MEM_ATTRS. */
2182 if (new_rtx == memref && offset != 0)
2183 new_rtx = copy_rtx (new_rtx);
2184
2d0fd66d 2185 /* Conservatively drop the object if we don't know where we start from. */
2186 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2187 {
2188 attrs.expr = NULL_TREE;
2189 attrs.alias = 0;
2190 }
2191
96216d37 2192 /* Compute the new values of the memory attributes due to this adjustment.
2193 We add the offsets and update the alignment. */
6d58bcba 2194 if (attrs.offset_known_p)
2d0fd66d 2195 {
2196 attrs.offset += offset;
2197
2198 /* Drop the object if the new left end is not within its bounds. */
2199 if (adjust_object && attrs.offset < 0)
2200 {
2201 attrs.expr = NULL_TREE;
2202 attrs.alias = 0;
2203 }
2204 }
96216d37 2205
b8098e5b 2206 /* Compute the new alignment by taking the MIN of the alignment and the
2207 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2208 is zero. */
2209 if (offset != 0)
d72886b5 2210 {
2211 max_align = (offset & -offset) * BITS_PER_UNIT;
2212 attrs.align = MIN (attrs.align, max_align);
2213 }
96216d37 2214
226c6baf 2215 if (size)
6d58bcba 2216 {
2d0fd66d 2217 /* Drop the object if the new right end is not within its bounds. */
226c6baf 2218 if (adjust_object && (offset + size) > attrs.size)
2d0fd66d 2219 {
2220 attrs.expr = NULL_TREE;
2221 attrs.alias = 0;
2222 }
6d58bcba 2223 attrs.size_known_p = true;
226c6baf 2224 attrs.size = size;
6d58bcba 2225 }
2226 else if (attrs.size_known_p)
2d0fd66d 2227 {
226c6baf 2228 gcc_assert (!adjust_object);
2d0fd66d 2229 attrs.size -= offset;
226c6baf 2230 /* ??? The store_by_pieces machinery generates negative sizes,
2231 so don't assert for that here. */
2d0fd66d 2232 }
5cc193e7 2233
d72886b5 2234 set_mem_attrs (new_rtx, &attrs);
96216d37 2235
9ce37fa7 2236 return new_rtx;
e4e86ec5 2237}
2238
bf42c62d 2239/* Return a memory reference like MEMREF, but with its mode changed
2240 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2241 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2242 nonzero, the memory address is forced to be valid. */
2243
2244rtx
35cb5232 2245adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2246 HOST_WIDE_INT offset, int validate)
bf42c62d 2247{
5cc04e45 2248 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
226c6baf 2249 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
bf42c62d 2250}
2251
2a631e19 2252/* Return a memory reference like MEMREF, but whose address is changed by
2253 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2254 known to be in OFFSET (possibly 1). */
fcdc122e 2255
2256rtx
35cb5232 2257offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2258{
9ce37fa7 2259 rtx new_rtx, addr = XEXP (memref, 0);
d72886b5 2260 enum machine_mode address_mode;
6d58bcba 2261 struct mem_attrs attrs, *defattrs;
fac6aae6 2262
d72886b5 2263 attrs = *get_mem_attrs (memref);
87cf5753 2264 address_mode = get_address_mode (memref);
98155838 2265 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2266
d4c5e26d 2267 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2268 could have secondary memory references, multiplies or anything.
fac6aae6 2269
2270 However, if we did go and rearrange things, we can wind up not
2271 being able to recognize the magic around pic_offset_table_rtx.
2272 This stuff is fragile, and is yet another example of why it is
2273 bad to expose PIC machinery too early. */
d72886b5 2274 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2275 attrs.addrspace)
fac6aae6 2276 && GET_CODE (addr) == PLUS
2277 && XEXP (addr, 0) == pic_offset_table_rtx)
2278 {
2279 addr = force_reg (GET_MODE (addr), addr);
98155838 2280 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2281 }
2282
9ce37fa7 2283 update_temp_slot_address (XEXP (memref, 0), new_rtx);
5cc04e45 2284 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
fcdc122e 2285
d28edf0d 2286 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2287 if (new_rtx == memref)
2288 return new_rtx;
d28edf0d 2289
fcdc122e 2290 /* Update the alignment to reflect the offset. Reset the offset, which
2291 we don't know. */
6d58bcba 2292 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2293 attrs.offset_known_p = false;
2294 attrs.size_known_p = defattrs->size_known_p;
2295 attrs.size = defattrs->size;
d72886b5 2296 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2297 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2298 return new_rtx;
fcdc122e 2299}
d4c5e26d 2300
537ffcfc 2301/* Return a memory reference like MEMREF, but with its address changed to
2302 ADDR. The caller is asserting that the actual piece of memory pointed
2303 to is the same, just the form of the address is being changed, such as
5cc04e45 2304 by putting something into a register. INPLACE is true if any changes
2305 can be made directly to MEMREF or false if MEMREF must be treated as
2306 immutable. */
537ffcfc 2307
2308rtx
5cc04e45 2309replace_equiv_address (rtx memref, rtx addr, bool inplace)
537ffcfc 2310{
96216d37 2311 /* change_address_1 copies the memory attribute structure without change
2312 and that's exactly what we want here. */
ecfe4ca9 2313 update_temp_slot_address (XEXP (memref, 0), addr);
5cc04e45 2314 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
537ffcfc 2315}
96216d37 2316
e4e86ec5 2317/* Likewise, but the reference is not required to be valid. */
2318
2319rtx
5cc04e45 2320replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
e4e86ec5 2321{
5cc04e45 2322 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
e4e86ec5 2323}
8259ab07 2324
2325/* Return a memory reference like MEMREF, but with its mode widened to
2326 MODE and offset by OFFSET. This would be used by targets that e.g.
2327 cannot issue QImode memory operations and have to use SImode memory
2328 operations plus masking logic. */
2329
2330rtx
35cb5232 2331widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2332{
226c6baf 2333 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
d72886b5 2334 struct mem_attrs attrs;
8259ab07 2335 unsigned int size = GET_MODE_SIZE (mode);
2336
d28edf0d 2337 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2338 if (new_rtx == memref)
2339 return new_rtx;
d28edf0d 2340
d72886b5 2341 attrs = *get_mem_attrs (new_rtx);
2342
8259ab07 2343 /* If we don't know what offset we were at within the expression, then
2344 we can't know if we've overstepped the bounds. */
6d58bcba 2345 if (! attrs.offset_known_p)
d72886b5 2346 attrs.expr = NULL_TREE;
8259ab07 2347
d72886b5 2348 while (attrs.expr)
8259ab07 2349 {
d72886b5 2350 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2351 {
d72886b5 2352 tree field = TREE_OPERAND (attrs.expr, 1);
2353 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2354
2355 if (! DECL_SIZE_UNIT (field))
2356 {
d72886b5 2357 attrs.expr = NULL_TREE;
8259ab07 2358 break;
2359 }
2360
2361 /* Is the field at least as large as the access? If so, ok,
2362 otherwise strip back to the containing structure. */
8359cfb4 2363 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2364 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
6d58bcba 2365 && attrs.offset >= 0)
8259ab07 2366 break;
2367
e913b5cd 2368 if (! tree_fits_uhwi_p (offset))
8259ab07 2369 {
d72886b5 2370 attrs.expr = NULL_TREE;
8259ab07 2371 break;
2372 }
2373
d72886b5 2374 attrs.expr = TREE_OPERAND (attrs.expr, 0);
e913b5cd 2375 attrs.offset += tree_to_uhwi (offset);
2376 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
6d58bcba 2377 / BITS_PER_UNIT);
8259ab07 2378 }
2379 /* Similarly for the decl. */
d72886b5 2380 else if (DECL_P (attrs.expr)
2381 && DECL_SIZE_UNIT (attrs.expr)
2382 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2383 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
6d58bcba 2384 && (! attrs.offset_known_p || attrs.offset >= 0))
8259ab07 2385 break;
2386 else
2387 {
2388 /* The widened memory access overflows the expression, which means
2389 that it could alias another expression. Zap it. */
d72886b5 2390 attrs.expr = NULL_TREE;
8259ab07 2391 break;
2392 }
2393 }
2394
d72886b5 2395 if (! attrs.expr)
6d58bcba 2396 attrs.offset_known_p = false;
8259ab07 2397
2398 /* The widened memory may alias other stuff, so zap the alias set. */
2399 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2400 attrs.alias = 0;
6d58bcba 2401 attrs.size_known_p = true;
2402 attrs.size = size;
d72886b5 2403 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2404 return new_rtx;
8259ab07 2405}
15bbde2b 2406\f
ac681e84 2407/* A fake decl that is used as the MEM_EXPR of spill slots. */
2408static GTY(()) tree spill_slot_decl;
2409
58029e61 2410tree
2411get_spill_slot_decl (bool force_build_p)
ac681e84 2412{
2413 tree d = spill_slot_decl;
2414 rtx rd;
d72886b5 2415 struct mem_attrs attrs;
ac681e84 2416
58029e61 2417 if (d || !force_build_p)
ac681e84 2418 return d;
2419
e60a6f7b 2420 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2421 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2422 DECL_ARTIFICIAL (d) = 1;
2423 DECL_IGNORED_P (d) = 1;
2424 TREE_USED (d) = 1;
ac681e84 2425 spill_slot_decl = d;
2426
2427 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2428 MEM_NOTRAP_P (rd) = 1;
d72886b5 2429 attrs = *mode_mem_attrs[(int) BLKmode];
2430 attrs.alias = new_alias_set ();
2431 attrs.expr = d;
2432 set_mem_attrs (rd, &attrs);
ac681e84 2433 SET_DECL_RTL (d, rd);
2434
2435 return d;
2436}
2437
2438/* Given MEM, a result from assign_stack_local, fill in the memory
2439 attributes as appropriate for a register allocator spill slot.
2440 These slots are not aliasable by other memory. We arrange for
2441 them all to use a single MEM_EXPR, so that the aliasing code can
2442 work properly in the case of shared spill slots. */
2443
2444void
2445set_mem_attrs_for_spill (rtx mem)
2446{
d72886b5 2447 struct mem_attrs attrs;
2448 rtx addr;
ac681e84 2449
d72886b5 2450 attrs = *get_mem_attrs (mem);
2451 attrs.expr = get_spill_slot_decl (true);
2452 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2453 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2454
2455 /* We expect the incoming memory to be of the form:
2456 (mem:MODE (plus (reg sfp) (const_int offset)))
2457 with perhaps the plus missing for offset = 0. */
2458 addr = XEXP (mem, 0);
6d58bcba 2459 attrs.offset_known_p = true;
2460 attrs.offset = 0;
ac681e84 2461 if (GET_CODE (addr) == PLUS
971ba038 2462 && CONST_INT_P (XEXP (addr, 1)))
6d58bcba 2463 attrs.offset = INTVAL (XEXP (addr, 1));
ac681e84 2464
d72886b5 2465 set_mem_attrs (mem, &attrs);
ac681e84 2466 MEM_NOTRAP_P (mem) = 1;
2467}
2468\f
15bbde2b 2469/* Return a newly created CODE_LABEL rtx with a unique label number. */
2470
2471rtx
35cb5232 2472gen_label_rtx (void)
15bbde2b 2473{
5cda2bd0 2474 return gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
35cb5232 2475 NULL, label_num++, NULL);
15bbde2b 2476}
2477\f
2478/* For procedure integration. */
2479
15bbde2b 2480/* Install new pointers to the first and last insns in the chain.
d4c332ff 2481 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2482 Used for an inline-procedure after copying the insn chain. */
2483
2484void
35cb5232 2485set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2486{
d4c332ff 2487 rtx insn;
2488
06f9d6ef 2489 set_first_insn (first);
2490 set_last_insn (last);
d4c332ff 2491 cur_insn_uid = 0;
2492
9845d120 2493 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2494 {
2495 int debug_count = 0;
2496
2497 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2498 cur_debug_insn_uid = 0;
2499
2500 for (insn = first; insn; insn = NEXT_INSN (insn))
2501 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2502 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2503 else
2504 {
2505 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2506 if (DEBUG_INSN_P (insn))
2507 debug_count++;
2508 }
2509
2510 if (debug_count)
2511 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2512 else
2513 cur_debug_insn_uid++;
2514 }
2515 else
2516 for (insn = first; insn; insn = NEXT_INSN (insn))
2517 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2518
2519 cur_insn_uid++;
15bbde2b 2520}
15bbde2b 2521\f
d823ba47 2522/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2523 structure. This routine should only be called once. */
15bbde2b 2524
a40c0eeb 2525static void
df329266 2526unshare_all_rtl_1 (rtx insn)
15bbde2b 2527{
2d96a59a 2528 /* Unshare just about everything else. */
1cd4cfea 2529 unshare_all_rtl_in_chain (insn);
d823ba47 2530
15bbde2b 2531 /* Make sure the addresses of stack slots found outside the insn chain
2532 (such as, in DECL_RTL of a variable) are not shared
2533 with the insn chain.
2534
2535 This special care is necessary when the stack slot MEM does not
2536 actually appear in the insn chain. If it does appear, its address
2537 is unshared from all else at that point. */
45733446 2538 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2539}
2540
d823ba47 2541/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2542 structure, again. This is a fairly expensive thing to do so it
2543 should be done sparingly. */
2544
2545void
35cb5232 2546unshare_all_rtl_again (rtx insn)
2d96a59a 2547{
2548 rtx p;
5244079b 2549 tree decl;
2550
2d96a59a 2551 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2552 if (INSN_P (p))
2d96a59a 2553 {
2554 reset_used_flags (PATTERN (p));
2555 reset_used_flags (REG_NOTES (p));
6d2a4bac 2556 if (CALL_P (p))
2557 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2d96a59a 2558 }
5244079b 2559
01dc9f0c 2560 /* Make sure that virtual stack slots are not shared. */
265be050 2561 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2562
5244079b 2563 /* Make sure that virtual parameters are not shared. */
1767a056 2564 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2565 set_used_flags (DECL_RTL (decl));
5244079b 2566
2567 reset_used_flags (stack_slot_list);
2568
df329266 2569 unshare_all_rtl_1 (insn);
a40c0eeb 2570}
2571
2a1990e9 2572unsigned int
a40c0eeb 2573unshare_all_rtl (void)
2574{
df329266 2575 unshare_all_rtl_1 (get_insns ());
2a1990e9 2576 return 0;
2d96a59a 2577}
2578
77fce4cd 2579
1cd4cfea 2580/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2581 Recursively does the same for subexpressions. */
2582
2583static void
2584verify_rtx_sharing (rtx orig, rtx insn)
2585{
2586 rtx x = orig;
2587 int i;
2588 enum rtx_code code;
2589 const char *format_ptr;
2590
2591 if (x == 0)
2592 return;
2593
2594 code = GET_CODE (x);
2595
2596 /* These types may be freely shared. */
2597
2598 switch (code)
2599 {
2600 case REG:
688ff29b 2601 case DEBUG_EXPR:
2602 case VALUE:
0349edce 2603 CASE_CONST_ANY:
1cd4cfea 2604 case SYMBOL_REF:
2605 case LABEL_REF:
2606 case CODE_LABEL:
2607 case PC:
2608 case CC0:
1a860023 2609 case RETURN:
9cb2517e 2610 case SIMPLE_RETURN:
1cd4cfea 2611 case SCRATCH:
c09425a0 2612 /* SCRATCH rtxes must be shared because they represent distinct values. */
b291008a 2613 return;
c09425a0 2614 case CLOBBER:
b291008a 2615 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2616 clobbers or clobbers of hard registers that originated as pseudos.
2617 This is needed to allow safe register renaming. */
2618 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2619 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2620 return;
2621 break;
1cd4cfea 2622
2623 case CONST:
3072d30e 2624 if (shared_const_p (orig))
1cd4cfea 2625 return;
2626 break;
2627
2628 case MEM:
2629 /* A MEM is allowed to be shared if its address is constant. */
2630 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2631 || reload_completed || reload_in_progress)
2632 return;
2633
2634 break;
2635
2636 default:
2637 break;
2638 }
2639
 2640 /* This rtx may not be shared. If it has already been seen,
 2641 report invalid rtl sharing. */
9cee7c3f 2642#ifdef ENABLE_CHECKING
1cd4cfea 2643 if (RTX_FLAG (x, used))
2644 {
0a81f5a0 2645 error ("invalid rtl sharing found in the insn");
1cd4cfea 2646 debug_rtx (insn);
0a81f5a0 2647 error ("shared rtx");
1cd4cfea 2648 debug_rtx (x);
0a81f5a0 2649 internal_error ("internal consistency failure");
1cd4cfea 2650 }
9cee7c3f 2651#endif
2652 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2653
1cd4cfea 2654 RTX_FLAG (x, used) = 1;
2655
8b332087 2656 /* Now scan the subexpressions recursively. */
1cd4cfea 2657
2658 format_ptr = GET_RTX_FORMAT (code);
2659
2660 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2661 {
2662 switch (*format_ptr++)
2663 {
2664 case 'e':
2665 verify_rtx_sharing (XEXP (x, i), insn);
2666 break;
2667
2668 case 'E':
2669 if (XVEC (x, i) != NULL)
2670 {
2671 int j;
2672 int len = XVECLEN (x, i);
2673
2674 for (j = 0; j < len; j++)
2675 {
9cee7c3f 2676 /* We allow sharing of ASM_OPERANDS inside a single
2677 instruction. */
1cd4cfea 2678 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2679 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2680 == ASM_OPERANDS))
1cd4cfea 2681 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2682 else
2683 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2684 }
2685 }
2686 break;
2687 }
2688 }
2689 return;
2690}
2691
1e9af25c 2692/* Reset used-flags for INSN. */
2693
2694static void
2695reset_insn_used_flags (rtx insn)
2696{
2697 gcc_assert (INSN_P (insn));
2698 reset_used_flags (PATTERN (insn));
2699 reset_used_flags (REG_NOTES (insn));
2700 if (CALL_P (insn))
2701 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2702}
2703
7cdd84a2 2704/* Go through all the RTL insn bodies and clear all the USED bits. */
1cd4cfea 2705
7cdd84a2 2706static void
2707reset_all_used_flags (void)
1cd4cfea 2708{
2709 rtx p;
2710
2711 for (p = get_insns (); p; p = NEXT_INSN (p))
2712 if (INSN_P (p))
2713 {
1e9af25c 2714 rtx pat = PATTERN (p);
2715 if (GET_CODE (pat) != SEQUENCE)
2716 reset_insn_used_flags (p);
2717 else
764f640f 2718 {
1e9af25c 2719 gcc_assert (REG_NOTES (p) == NULL);
2720 for (int i = 0; i < XVECLEN (pat, 0); i++)
2721 reset_insn_used_flags (XVECEXP (pat, 0, i));
764f640f 2722 }
1cd4cfea 2723 }
7cdd84a2 2724}
2725
1e9af25c 2726/* Verify sharing in INSN. */
2727
2728static void
2729verify_insn_sharing (rtx insn)
2730{
2731 gcc_assert (INSN_P (insn));
 2732 verify_rtx_sharing (PATTERN (insn), insn);
 2733 verify_rtx_sharing (REG_NOTES (insn), insn);
 2734 if (CALL_P (insn))
 2735 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2736}
2737
7cdd84a2 2738/* Go through all the RTL insn bodies and check that there is no unexpected
 2739 sharing between the subexpressions. */
2740
2741DEBUG_FUNCTION void
2742verify_rtl_sharing (void)
2743{
2744 rtx p;
2745
2746 timevar_push (TV_VERIFY_RTL_SHARING);
2747
2748 reset_all_used_flags ();
1cd4cfea 2749
2750 for (p = get_insns (); p; p = NEXT_INSN (p))
2751 if (INSN_P (p))
2752 {
1e9af25c 2753 rtx pat = PATTERN (p);
2754 if (GET_CODE (pat) != SEQUENCE)
2755 verify_insn_sharing (p);
2756 else
2757 for (int i = 0; i < XVECLEN (pat, 0); i++)
2758 verify_insn_sharing (XVECEXP (pat, 0, i));
1cd4cfea 2759 }
4b366dd3 2760
7cdd84a2 2761 reset_all_used_flags ();
2762
4b366dd3 2763 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2764}
2765
2d96a59a 2766/* Go through all the RTL insn bodies and copy any invalid shared structure.
2767 Assumes the mark bits are cleared at entry. */
2768
1cd4cfea 2769void
2770unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2771{
2772 for (; insn; insn = NEXT_INSN (insn))
9204e736 2773 if (INSN_P (insn))
2d96a59a 2774 {
2775 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2776 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
6d2a4bac 2777 if (CALL_P (insn))
2778 CALL_INSN_FUNCTION_USAGE (insn)
2779 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2d96a59a 2780 }
2781}
2782
01dc9f0c 2783/* Go through all virtual stack slots of a function and mark them as
265be050 2784 shared. We never replace the DECL_RTLs themselves with a copy,
2785 but expressions mentioned into a DECL_RTL cannot be shared with
2786 expressions in the instruction stream.
2787
2788 Note that reload may convert pseudo registers into memories in-place.
2789 Pseudo registers are always shared, but MEMs never are. Thus if we
2790 reset the used flags on MEMs in the instruction stream, we must set
2791 them again on MEMs that appear in DECL_RTLs. */
2792
01dc9f0c 2793static void
265be050 2794set_used_decls (tree blk)
01dc9f0c 2795{
2796 tree t;
2797
2798 /* Mark decls. */
1767a056 2799 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2800 if (DECL_RTL_SET_P (t))
265be050 2801 set_used_flags (DECL_RTL (t));
01dc9f0c 2802
2803 /* Now process sub-blocks. */
93110716 2804 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2805 set_used_decls (t);
01dc9f0c 2806}
2807
15bbde2b 2808/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2809 Recursively does the same for subexpressions. Uses
2810 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2811
2812rtx
35cb5232 2813copy_rtx_if_shared (rtx orig)
15bbde2b 2814{
0e0727c4 2815 copy_rtx_if_shared_1 (&orig);
2816 return orig;
2817}
2818
7ba6ce7a 2819/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2820 use. Recursively does the same for subexpressions. */
2821
0e0727c4 2822static void
2823copy_rtx_if_shared_1 (rtx *orig1)
2824{
2825 rtx x;
19cb6b50 2826 int i;
2827 enum rtx_code code;
0e0727c4 2828 rtx *last_ptr;
19cb6b50 2829 const char *format_ptr;
15bbde2b 2830 int copied = 0;
0e0727c4 2831 int length;
2832
2833 /* Repeat is used to turn tail-recursion into iteration. */
2834repeat:
2835 x = *orig1;
15bbde2b 2836
2837 if (x == 0)
0e0727c4 2838 return;
15bbde2b 2839
2840 code = GET_CODE (x);
2841
2842 /* These types may be freely shared. */
2843
2844 switch (code)
2845 {
2846 case REG:
688ff29b 2847 case DEBUG_EXPR:
2848 case VALUE:
0349edce 2849 CASE_CONST_ANY:
15bbde2b 2850 case SYMBOL_REF:
1cd4cfea 2851 case LABEL_REF:
15bbde2b 2852 case CODE_LABEL:
2853 case PC:
2854 case CC0:
e0691b9a 2855 case RETURN:
9cb2517e 2856 case SIMPLE_RETURN:
15bbde2b 2857 case SCRATCH:
a92771b8 2858 /* SCRATCH rtxes must be shared because they represent distinct values. */
0e0727c4 2859 return;
c09425a0 2860 case CLOBBER:
b291008a 2861 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2862 clobbers or clobbers of hard registers that originated as pseudos.
2863 This is needed to allow safe register renaming. */
2864 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2865 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2866 return;
2867 break;
15bbde2b 2868
f63d12e3 2869 case CONST:
3072d30e 2870 if (shared_const_p (x))
0e0727c4 2871 return;
f63d12e3 2872 break;
2873
9845d120 2874 case DEBUG_INSN:
15bbde2b 2875 case INSN:
2876 case JUMP_INSN:
2877 case CALL_INSN:
2878 case NOTE:
15bbde2b 2879 case BARRIER:
2880 /* The chain of insns is not being copied. */
0e0727c4 2881 return;
15bbde2b 2882
0dbd1c74 2883 default:
2884 break;
15bbde2b 2885 }
2886
2887 /* This rtx may not be shared. If it has already been seen,
2888 replace it with a copy of itself. */
2889
7c25cb91 2890 if (RTX_FLAG (x, used))
15bbde2b 2891 {
f2d0e9f1 2892 x = shallow_copy_rtx (x);
15bbde2b 2893 copied = 1;
2894 }
7c25cb91 2895 RTX_FLAG (x, used) = 1;
15bbde2b 2896
2897 /* Now scan the subexpressions recursively.
2898 We can store any replaced subexpressions directly into X
2899 since we know X is not shared! Any vectors in X
2900 must be copied if X was copied. */
2901
2902 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2903 length = GET_RTX_LENGTH (code);
2904 last_ptr = NULL;
48e1416a 2905
0e0727c4 2906 for (i = 0; i < length; i++)
15bbde2b 2907 {
2908 switch (*format_ptr++)
2909 {
2910 case 'e':
0e0727c4 2911 if (last_ptr)
2912 copy_rtx_if_shared_1 (last_ptr);
2913 last_ptr = &XEXP (x, i);
15bbde2b 2914 break;
2915
2916 case 'E':
2917 if (XVEC (x, i) != NULL)
2918 {
19cb6b50 2919 int j;
ffe0869b 2920 int len = XVECLEN (x, i);
48e1416a 2921
8b332087 2922 /* Copy the vector iff I copied the rtx and the length
2923 is nonzero. */
ffe0869b 2924 if (copied && len > 0)
a4070a91 2925 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 2926
d632b59a 2927 /* Call recursively on all the elements of the vector. */
ffe0869b 2928 for (j = 0; j < len; j++)
0e0727c4 2929 {
2930 if (last_ptr)
2931 copy_rtx_if_shared_1 (last_ptr);
2932 last_ptr = &XVECEXP (x, i, j);
2933 }
15bbde2b 2934 }
2935 break;
2936 }
2937 }
0e0727c4 2938 *orig1 = x;
2939 if (last_ptr)
2940 {
2941 orig1 = last_ptr;
2942 goto repeat;
2943 }
2944 return;
15bbde2b 2945}
2946
709947e6 2947/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 2948
709947e6 2949static void
2950mark_used_flags (rtx x, int flag)
15bbde2b 2951{
19cb6b50 2952 int i, j;
2953 enum rtx_code code;
2954 const char *format_ptr;
0e0727c4 2955 int length;
15bbde2b 2956
0e0727c4 2957 /* Repeat is used to turn tail-recursion into iteration. */
2958repeat:
15bbde2b 2959 if (x == 0)
2960 return;
2961
2962 code = GET_CODE (x);
2963
c3418f42 2964 /* These types may be freely shared, so we needn't do any resetting
15bbde2b 2965 for them. */
2966
2967 switch (code)
2968 {
2969 case REG:
688ff29b 2970 case DEBUG_EXPR:
2971 case VALUE:
0349edce 2972 CASE_CONST_ANY:
15bbde2b 2973 case SYMBOL_REF:
2974 case CODE_LABEL:
2975 case PC:
2976 case CC0:
e0691b9a 2977 case RETURN:
9cb2517e 2978 case SIMPLE_RETURN:
15bbde2b 2979 return;
2980
9845d120 2981 case DEBUG_INSN:
15bbde2b 2982 case INSN:
2983 case JUMP_INSN:
2984 case CALL_INSN:
2985 case NOTE:
2986 case LABEL_REF:
2987 case BARRIER:
2988 /* The chain of insns is not being copied. */
2989 return;
d823ba47 2990
0dbd1c74 2991 default:
2992 break;
15bbde2b 2993 }
2994
709947e6 2995 RTX_FLAG (x, used) = flag;
15bbde2b 2996
2997 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2998 length = GET_RTX_LENGTH (code);
48e1416a 2999
0e0727c4 3000 for (i = 0; i < length; i++)
15bbde2b 3001 {
3002 switch (*format_ptr++)
3003 {
3004 case 'e':
0e0727c4 3005 if (i == length-1)
3006 {
3007 x = XEXP (x, i);
3008 goto repeat;
3009 }
709947e6 3010 mark_used_flags (XEXP (x, i), flag);
15bbde2b 3011 break;
3012
3013 case 'E':
3014 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 3015 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 3016 break;
3017 }
3018 }
3019}
1cd4cfea 3020
709947e6 3021/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 3022 to look for shared sub-parts. */
3023
3024void
709947e6 3025reset_used_flags (rtx x)
1cd4cfea 3026{
709947e6 3027 mark_used_flags (x, 0);
3028}
1cd4cfea 3029
709947e6 3030/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3031 to look for shared sub-parts. */
1cd4cfea 3032
709947e6 3033void
3034set_used_flags (rtx x)
3035{
3036 mark_used_flags (x, 1);
1cd4cfea 3037}
15bbde2b 3038\f
3039/* Copy X if necessary so that it won't be altered by changes in OTHER.
3040 Return X or the rtx for the pseudo reg the value of X was copied into.
3041 OTHER must be valid as a SET_DEST. */
3042
3043rtx
35cb5232 3044make_safe_from (rtx x, rtx other)
15bbde2b 3045{
3046 while (1)
3047 switch (GET_CODE (other))
3048 {
3049 case SUBREG:
3050 other = SUBREG_REG (other);
3051 break;
3052 case STRICT_LOW_PART:
3053 case SIGN_EXTEND:
3054 case ZERO_EXTEND:
3055 other = XEXP (other, 0);
3056 break;
3057 default:
3058 goto done;
3059 }
3060 done:
e16ceb8e 3061 if ((MEM_P (other)
15bbde2b 3062 && ! CONSTANT_P (x)
8ad4c111 3063 && !REG_P (x)
15bbde2b 3064 && GET_CODE (x) != SUBREG)
8ad4c111 3065 || (REG_P (other)
15bbde2b 3066 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3067 || reg_mentioned_p (other, x))))
3068 {
3069 rtx temp = gen_reg_rtx (GET_MODE (x));
3070 emit_move_insn (temp, x);
3071 return temp;
3072 }
3073 return x;
3074}
3075\f
3076/* Emission of insns (adding them to the doubly-linked list). */
3077
15bbde2b 3078/* Return the last insn emitted, even if it is in a sequence now pushed. */
3079
3080rtx
35cb5232 3081get_last_insn_anywhere (void)
15bbde2b 3082{
3083 struct sequence_stack *stack;
06f9d6ef 3084 if (get_last_insn ())
3085 return get_last_insn ();
0a893c29 3086 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 3087 if (stack->last != 0)
3088 return stack->last;
3089 return 0;
3090}
3091
70545de4 3092/* Return the first nonnote insn emitted in the current sequence or current
3093 function. This routine looks inside SEQUENCEs. */
3094
3095rtx
35cb5232 3096get_first_nonnote_insn (void)
70545de4 3097{
06f9d6ef 3098 rtx insn = get_insns ();
f86e856e 3099
3100 if (insn)
3101 {
3102 if (NOTE_P (insn))
3103 for (insn = next_insn (insn);
3104 insn && NOTE_P (insn);
3105 insn = next_insn (insn))
3106 continue;
3107 else
3108 {
1c14a50e 3109 if (NONJUMP_INSN_P (insn)
f86e856e 3110 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3111 insn = XVECEXP (PATTERN (insn), 0, 0);
3112 }
3113 }
70545de4 3114
3115 return insn;
3116}
3117
 3118/* Return the last nonnote insn emitted in the current sequence or current
3119 function. This routine looks inside SEQUENCEs. */
3120
3121rtx
35cb5232 3122get_last_nonnote_insn (void)
70545de4 3123{
06f9d6ef 3124 rtx insn = get_last_insn ();
f86e856e 3125
3126 if (insn)
3127 {
3128 if (NOTE_P (insn))
3129 for (insn = previous_insn (insn);
3130 insn && NOTE_P (insn);
3131 insn = previous_insn (insn))
3132 continue;
3133 else
3134 {
1c14a50e 3135 if (NONJUMP_INSN_P (insn)
f86e856e 3136 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3137 insn = XVECEXP (PATTERN (insn), 0,
3138 XVECLEN (PATTERN (insn), 0) - 1);
3139 }
3140 }
70545de4 3141
3142 return insn;
3143}
3144
9845d120 3145/* Return the number of actual (non-debug) insns emitted in this
3146 function. */
3147
3148int
3149get_max_insn_count (void)
3150{
3151 int n = cur_insn_uid;
3152
3153 /* The table size must be stable across -g, to avoid codegen
3154 differences due to debug insns, and not be affected by
3155 -fmin-insn-uid, to avoid excessive table size and to simplify
3156 debugging of -fcompare-debug failures. */
3157 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3158 n -= cur_debug_insn_uid;
3159 else
3160 n -= MIN_NONDEBUG_INSN_UID;
3161
3162 return n;
3163}
3164
15bbde2b 3165\f
3166/* Return the next insn. If it is a SEQUENCE, return the first insn
3167 of the sequence. */
3168
3169rtx
35cb5232 3170next_insn (rtx insn)
15bbde2b 3171{
ce4469fa 3172 if (insn)
3173 {
3174 insn = NEXT_INSN (insn);
3175 if (insn && NONJUMP_INSN_P (insn)
3176 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3177 insn = XVECEXP (PATTERN (insn), 0, 0);
3178 }
15bbde2b 3179
ce4469fa 3180 return insn;
15bbde2b 3181}
3182
3183/* Return the previous insn. If it is a SEQUENCE, return the last insn
3184 of the sequence. */
3185
3186rtx
35cb5232 3187previous_insn (rtx insn)
15bbde2b 3188{
ce4469fa 3189 if (insn)
3190 {
3191 insn = PREV_INSN (insn);
3192 if (insn && NONJUMP_INSN_P (insn)
3193 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3194 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3195 }
15bbde2b 3196
ce4469fa 3197 return insn;
15bbde2b 3198}
3199
3200/* Return the next insn after INSN that is not a NOTE. This routine does not
3201 look inside SEQUENCEs. */
3202
3203rtx
35cb5232 3204next_nonnote_insn (rtx insn)
15bbde2b 3205{
ce4469fa 3206 while (insn)
3207 {
3208 insn = NEXT_INSN (insn);
3209 if (insn == 0 || !NOTE_P (insn))
3210 break;
3211 }
15bbde2b 3212
ce4469fa 3213 return insn;
15bbde2b 3214}
3215
c4d13c5c 3216/* Return the next insn after INSN that is not a NOTE, but stop the
3217 search before we enter another basic block. This routine does not
3218 look inside SEQUENCEs. */
3219
3220rtx
3221next_nonnote_insn_bb (rtx insn)
3222{
3223 while (insn)
3224 {
3225 insn = NEXT_INSN (insn);
3226 if (insn == 0 || !NOTE_P (insn))
3227 break;
3228 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3229 return NULL_RTX;
3230 }
3231
3232 return insn;
3233}
3234
15bbde2b 3235/* Return the previous insn before INSN that is not a NOTE. This routine does
3236 not look inside SEQUENCEs. */
3237
3238rtx
35cb5232 3239prev_nonnote_insn (rtx insn)
15bbde2b 3240{
ce4469fa 3241 while (insn)
3242 {
3243 insn = PREV_INSN (insn);
3244 if (insn == 0 || !NOTE_P (insn))
3245 break;
3246 }
15bbde2b 3247
ce4469fa 3248 return insn;
15bbde2b 3249}
3250
bcc66782 3251/* Return the previous insn before INSN that is not a NOTE, but stop
3252 the search before we enter another basic block. This routine does
3253 not look inside SEQUENCEs. */
3254
3255rtx
3256prev_nonnote_insn_bb (rtx insn)
3257{
3258 while (insn)
3259 {
3260 insn = PREV_INSN (insn);
3261 if (insn == 0 || !NOTE_P (insn))
3262 break;
3263 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3264 return NULL_RTX;
3265 }
3266
3267 return insn;
3268}
3269
9845d120 3270/* Return the next insn after INSN that is not a DEBUG_INSN. This
3271 routine does not look inside SEQUENCEs. */
3272
3273rtx
3274next_nondebug_insn (rtx insn)
3275{
3276 while (insn)
3277 {
3278 insn = NEXT_INSN (insn);
3279 if (insn == 0 || !DEBUG_INSN_P (insn))
3280 break;
3281 }
3282
3283 return insn;
3284}
3285
3286/* Return the previous insn before INSN that is not a DEBUG_INSN.
3287 This routine does not look inside SEQUENCEs. */
3288
3289rtx
3290prev_nondebug_insn (rtx insn)
3291{
3292 while (insn)
3293 {
3294 insn = PREV_INSN (insn);
3295 if (insn == 0 || !DEBUG_INSN_P (insn))
3296 break;
3297 }
3298
3299 return insn;
3300}
3301
5b8537a8 3302/* Return the next insn after INSN that is neither a NOTE nor a DEBUG_INSN.
3303 This routine does not look inside SEQUENCEs. */
3304
3305rtx
3306next_nonnote_nondebug_insn (rtx insn)
3307{
3308 while (insn)
3309 {
3310 insn = NEXT_INSN (insn);
3311 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3312 break;
3313 }
3314
3315 return insn;
3316}
3317
 3318/* Return the previous insn before INSN that is neither a NOTE nor a DEBUG_INSN.
3319 This routine does not look inside SEQUENCEs. */
3320
3321rtx
3322prev_nonnote_nondebug_insn (rtx insn)
3323{
3324 while (insn)
3325 {
3326 insn = PREV_INSN (insn);
3327 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3328 break;
3329 }
3330
3331 return insn;
3332}
3333
15bbde2b 3334/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3335 or 0, if there is none. This routine does not look inside
a92771b8 3336 SEQUENCEs. */
15bbde2b 3337
3338rtx
35cb5232 3339next_real_insn (rtx insn)
15bbde2b 3340{
ce4469fa 3341 while (insn)
3342 {
3343 insn = NEXT_INSN (insn);
3344 if (insn == 0 || INSN_P (insn))
3345 break;
3346 }
15bbde2b 3347
ce4469fa 3348 return insn;
15bbde2b 3349}
3350
3351/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3352 or 0, if there is none. This routine does not look inside
3353 SEQUENCEs. */
3354
3355rtx
35cb5232 3356prev_real_insn (rtx insn)
15bbde2b 3357{
ce4469fa 3358 while (insn)
3359 {
3360 insn = PREV_INSN (insn);
3361 if (insn == 0 || INSN_P (insn))
3362 break;
3363 }
15bbde2b 3364
ce4469fa 3365 return insn;
15bbde2b 3366}
3367
d5f9786f 3368/* Return the last CALL_INSN in the current list, or 0 if there is none.
3369 This routine does not look inside SEQUENCEs. */
3370
3371rtx
35cb5232 3372last_call_insn (void)
d5f9786f 3373{
3374 rtx insn;
3375
3376 for (insn = get_last_insn ();
6d7dc5b9 3377 insn && !CALL_P (insn);
d5f9786f 3378 insn = PREV_INSN (insn))
3379 ;
3380
3381 return insn;
3382}
3383
15bbde2b 3384/* Find the next insn after INSN that really does something. This routine
084950ee 3385 does not look inside SEQUENCEs. After reload this also skips over
 3386 standalone USE and CLOBBER insns. */
15bbde2b 3387
2215ca0d 3388int
52d07779 3389active_insn_p (const_rtx insn)
2215ca0d 3390{
6d7dc5b9 3391 return (CALL_P (insn) || JUMP_P (insn)
91f71fa3 3392 || JUMP_TABLE_DATA_P (insn) /* FIXME */
6d7dc5b9 3393 || (NONJUMP_INSN_P (insn)
3a66feab 3394 && (! reload_completed
3395 || (GET_CODE (PATTERN (insn)) != USE
3396 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3397}
3398
15bbde2b 3399rtx
35cb5232 3400next_active_insn (rtx insn)
15bbde2b 3401{
ce4469fa 3402 while (insn)
3403 {
3404 insn = NEXT_INSN (insn);
3405 if (insn == 0 || active_insn_p (insn))
3406 break;
3407 }
15bbde2b 3408
ce4469fa 3409 return insn;
15bbde2b 3410}
3411
3412/* Find the last insn before INSN that really does something. This routine
084950ee 3413 does not look inside SEQUENCEs. After reload this also skips over
 3414 standalone USE and CLOBBER insns. */
15bbde2b 3415
3416rtx
35cb5232 3417prev_active_insn (rtx insn)
15bbde2b 3418{
ce4469fa 3419 while (insn)
3420 {
3421 insn = PREV_INSN (insn);
3422 if (insn == 0 || active_insn_p (insn))
3423 break;
3424 }
15bbde2b 3425
ce4469fa 3426 return insn;
15bbde2b 3427}
15bbde2b 3428\f
3429#ifdef HAVE_cc0
3430/* Return the next insn that uses CC0 after INSN, which is assumed to
3431 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3432 applied to the result of this function should yield INSN).
3433
3434 Normally, this is simply the next insn. However, if a REG_CC_USER note
3435 is present, it contains the insn that uses CC0.
3436
3437 Return 0 if we can't find the insn. */
3438
3439rtx
35cb5232 3440next_cc0_user (rtx insn)
15bbde2b 3441{
b572011e 3442 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3443
3444 if (note)
3445 return XEXP (note, 0);
3446
3447 insn = next_nonnote_insn (insn);
6d7dc5b9 3448 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3449 insn = XVECEXP (PATTERN (insn), 0, 0);
3450
9204e736 3451 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3452 return insn;
3453
3454 return 0;
3455}
3456
3457/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3458 note, it is the previous insn. */
3459
3460rtx
35cb5232 3461prev_cc0_setter (rtx insn)
15bbde2b 3462{
b572011e 3463 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3464
3465 if (note)
3466 return XEXP (note, 0);
3467
3468 insn = prev_nonnote_insn (insn);
611234b4 3469 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3470
3471 return insn;
3472}
3473#endif
344dc2fa 3474
698ff1f0 3475#ifdef AUTO_INC_DEC
 3476/* Find an RTX_AUTOINC class rtx which matches DATA. */
3477
3478static int
3479find_auto_inc (rtx *xp, void *data)
3480{
3481 rtx x = *xp;
225ab426 3482 rtx reg = (rtx) data;
698ff1f0 3483
3484 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3485 return 0;
3486
3487 switch (GET_CODE (x))
3488 {
3489 case PRE_DEC:
3490 case PRE_INC:
3491 case POST_DEC:
3492 case POST_INC:
3493 case PRE_MODIFY:
3494 case POST_MODIFY:
3495 if (rtx_equal_p (reg, XEXP (x, 0)))
3496 return 1;
3497 break;
3498
3499 default:
3500 gcc_unreachable ();
3501 }
3502 return -1;
3503}
3504#endif
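
/* Added note (illustrative, not part of the original source):
   for_each_rtx traverses an expression depth-first and interprets the
   callback's return value as follows: 0 continues into
   subexpressions, -1 skips the current subtree, and any other nonzero
   value stops the walk and is returned.  Hence

	for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0

   (as used in try_split below) is true exactly when PATTERN (insn)
   contains an auto-increment of REG.  */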
3505
344dc2fa 3506/* Increment the label uses for all labels present in rtx. */
3507
3508static void
35cb5232 3509mark_label_nuses (rtx x)
344dc2fa 3510{
19cb6b50 3511 enum rtx_code code;
3512 int i, j;
3513 const char *fmt;
344dc2fa 3514
3515 code = GET_CODE (x);
a030d4a8 3516 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3517 LABEL_NUSES (XEXP (x, 0))++;
3518
3519 fmt = GET_RTX_FORMAT (code);
3520 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3521 {
3522 if (fmt[i] == 'e')
ff385626 3523 mark_label_nuses (XEXP (x, i));
344dc2fa 3524 else if (fmt[i] == 'E')
ff385626 3525 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3526 mark_label_nuses (XVECEXP (x, i, j));
3527 }
3528}
3529
15bbde2b 3530\f
3531/* Try splitting insns that can be split for better scheduling.
 3532   PAT is the pattern which might be split.
3533 TRIAL is the insn providing PAT.
6ef828f9 3534 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3535
3536 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3537 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3538 returns TRIAL. If the insn to be returned can be split, it will be. */
3539
3540rtx
35cb5232 3541try_split (rtx pat, rtx trial, int last)
15bbde2b 3542{
3543 rtx before = PREV_INSN (trial);
3544 rtx after = NEXT_INSN (trial);
15bbde2b 3545 int has_barrier = 0;
1e5b92fa 3546 rtx note, seq, tem;
3cd757b1 3547 int probability;
e13693ec 3548 rtx insn_last, insn;
3549 int njumps = 0;
2e3b0d0f 3550 rtx call_insn = NULL_RTX;
3cd757b1 3551
25e880b1 3552 /* We're not good at redistributing frame information. */
3553 if (RTX_FRAME_RELATED_P (trial))
3554 return trial;
3555
3cd757b1 3556 if (any_condjump_p (trial)
3557 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
9eb946de 3558 split_branch_probability = XINT (note, 0);
3cd757b1 3559 probability = split_branch_probability;
3560
3561 seq = split_insns (pat, trial);
3562
3563 split_branch_probability = -1;
15bbde2b 3564
3565 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3566 We may need to handle this specially. */
6d7dc5b9 3567 if (after && BARRIER_P (after))
15bbde2b 3568 {
3569 has_barrier = 1;
3570 after = NEXT_INSN (after);
3571 }
3572
e13693ec 3573 if (!seq)
3574 return trial;
3575
3576 /* Avoid infinite loop if any insn of the result matches
3577 the original pattern. */
3578 insn_last = seq;
3579 while (1)
15bbde2b 3580 {
e13693ec 3581 if (INSN_P (insn_last)
3582 && rtx_equal_p (PATTERN (insn_last), pat))
3583 return trial;
3584 if (!NEXT_INSN (insn_last))
3585 break;
3586 insn_last = NEXT_INSN (insn_last);
3587 }
d823ba47 3588
3072d30e 3589 /* We will be adding the new sequence to the function. The splitters
3590 may have introduced invalid RTL sharing, so unshare the sequence now. */
3591 unshare_all_rtl_in_chain (seq);
3592
e13693ec 3593 /* Mark labels. */
3594 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3595 {
6d7dc5b9 3596 if (JUMP_P (insn))
e13693ec 3597 {
3598 mark_jump_label (PATTERN (insn), insn, 0);
3599 njumps++;
3600 if (probability != -1
3601 && any_condjump_p (insn)
3602 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3603 {
e13693ec 3604 /* We can preserve the REG_BR_PROB notes only if exactly
3605 one jump is created, otherwise the machine description
3606 is responsible for this step using
3607 split_branch_probability variable. */
611234b4 3608 gcc_assert (njumps == 1);
9eb946de 3609 add_int_reg_note (insn, REG_BR_PROB, probability);
31d3e01c 3610 }
e13693ec 3611 }
3612 }
3613
3614 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3615 in SEQ and copy any additional information across. */
6d7dc5b9 3616 if (CALL_P (trial))
e13693ec 3617 {
3618 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3619 if (CALL_P (insn))
e13693ec 3620 {
b0bd0491 3621 rtx next, *p;
3622
2e3b0d0f 3623 gcc_assert (call_insn == NULL_RTX);
3624 call_insn = insn;
3625
b0bd0491 3626 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3627 target may have explicitly specified. */
3628 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3629 while (*p)
3630 p = &XEXP (*p, 1);
3631 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3632
3633 /* If the old call was a sibling call, the new one must
3634 be too. */
e13693ec 3635 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3636
3637 /* If the new call is the last instruction in the sequence,
3638 it will effectively replace the old call in-situ. Otherwise
3639 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3640 so that it comes immediately after the new call. */
3641 if (NEXT_INSN (insn))
47e1410d 3642 for (next = NEXT_INSN (trial);
3643 next && NOTE_P (next);
3644 next = NEXT_INSN (next))
3645 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3646 {
3647 remove_insn (next);
3648 add_insn_after (next, insn, NULL);
47e1410d 3649 break;
b0bd0491 3650 }
e13693ec 3651 }
3652 }
5262c253 3653
e13693ec 3654 /* Copy notes, particularly those related to the CFG. */
3655 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3656 {
3657 switch (REG_NOTE_KIND (note))
3658 {
3659 case REG_EH_REGION:
e38def9c 3660 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3661 break;
381eb1e7 3662
e13693ec 3663 case REG_NORETURN:
3664 case REG_SETJMP:
4c0315d0 3665 case REG_TM:
698ff1f0 3666 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3667 {
6d7dc5b9 3668 if (CALL_P (insn))
a1ddb869 3669 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3670 }
e13693ec 3671 break;
5bb27a4b 3672
e13693ec 3673 case REG_NON_LOCAL_GOTO:
aa78dca5 3674 case REG_CROSSING_JUMP:
698ff1f0 3675 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3676 {
6d7dc5b9 3677 if (JUMP_P (insn))
a1ddb869 3678 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3679 }
e13693ec 3680 break;
344dc2fa 3681
698ff1f0 3682#ifdef AUTO_INC_DEC
3683 case REG_INC:
3684 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3685 {
3686 rtx reg = XEXP (note, 0);
3687 if (!FIND_REG_INC_NOTE (insn, reg)
3688 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3689 add_reg_note (insn, REG_INC, reg);
698ff1f0 3690 }
3691 break;
3692#endif
3693
dfe00a8f 3694 case REG_ARGS_SIZE:
3695 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3696 break;
3697
2e3b0d0f 3698 case REG_CALL_DECL:
3699 gcc_assert (call_insn != NULL_RTX);
3700 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3701 break;
3702
e13693ec 3703 default:
3704 break;
15bbde2b 3705 }
e13693ec 3706 }
3707
3708 /* If there are LABELS inside the split insns increment the
3709 usage count so we don't delete the label. */
19d2fe05 3710 if (INSN_P (trial))
e13693ec 3711 {
3712 insn = insn_last;
3713 while (insn != NULL_RTX)
15bbde2b 3714 {
19d2fe05 3715 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3716 if (NONJUMP_INSN_P (insn))
e13693ec 3717 mark_label_nuses (PATTERN (insn));
15bbde2b 3718
e13693ec 3719 insn = PREV_INSN (insn);
3720 }
15bbde2b 3721 }
3722
5169661d 3723 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
e13693ec 3724
3725 delete_insn (trial);
3726 if (has_barrier)
3727 emit_barrier_after (tem);
3728
3729 /* Recursively call try_split for each new insn created; by the
3730 time control returns here that insn will be fully split, so
3731 set LAST and continue from the insn after the one returned.
3732 We can't use next_active_insn here since AFTER may be a note.
 3733   Ignore deleted insns, which can occur if not optimizing. */
3734 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3735 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3736 tem = try_split (PATTERN (tem), tem, 1);
3737
3738 /* Return either the first or the last insn, depending on which was
3739 requested. */
3740 return last
06f9d6ef 3741 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3742 : NEXT_INSN (before);
15bbde2b 3743}
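
/* Usage sketch (illustrative, not part of the original source): a
   pass that wants everything split could walk the chain and call
   try_split on each real insn, roughly as split_all_insns in recog.c
   does, minus its CFG bookkeeping:

	rtx insn, next;
	for (insn = get_insns (); insn; insn = next)
	  {
	    next = NEXT_INSN (insn);
	    if (INSN_P (insn))
	      try_split (PATTERN (insn), insn, 1);
	  }

   Passing LAST == 1 makes each call return the last insn of the
   replacement sequence, and NEXT was latched beforehand, so the walk
   resumes past the newly split insns.  */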
3744\f
3745/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3746 Store PATTERN in the pattern slots. */
15bbde2b 3747
3748rtx
35cb5232 3749make_insn_raw (rtx pattern)
15bbde2b 3750{
19cb6b50 3751 rtx insn;
15bbde2b 3752
d7c47c0e 3753 insn = rtx_alloc (INSN);
15bbde2b 3754
575333f9 3755 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3756 PATTERN (insn) = pattern;
3757 INSN_CODE (insn) = -1;
fc92fa61 3758 REG_NOTES (insn) = NULL;
5169661d 3759 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3760 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3761
fe7f701d 3762#ifdef ENABLE_RTL_CHECKING
3763 if (insn
9204e736 3764 && INSN_P (insn)
fe7f701d 3765 && (returnjump_p (insn)
3766 || (GET_CODE (insn) == SET
3767 && SET_DEST (insn) == pc_rtx)))
3768 {
c3ceba8e 3769 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3770 debug_rtx (insn);
3771 }
3772#endif
d823ba47 3773
15bbde2b 3774 return insn;
3775}
3776
9845d120 3777/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3778
e4fdf07d 3779static rtx
9845d120 3780make_debug_insn_raw (rtx pattern)
3781{
3782 rtx insn;
3783
3784 insn = rtx_alloc (DEBUG_INSN);
3785 INSN_UID (insn) = cur_debug_insn_uid++;
3786 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3787 INSN_UID (insn) = cur_insn_uid++;
3788
3789 PATTERN (insn) = pattern;
3790 INSN_CODE (insn) = -1;
3791 REG_NOTES (insn) = NULL;
5169661d 3792 INSN_LOCATION (insn) = curr_insn_location ();
9845d120 3793 BLOCK_FOR_INSN (insn) = NULL;
3794
3795 return insn;
3796}
3797
31d3e01c 3798/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3799
e4fdf07d 3800static rtx
35cb5232 3801make_jump_insn_raw (rtx pattern)
15bbde2b 3802{
19cb6b50 3803 rtx insn;
15bbde2b 3804
6a84e367 3805 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3806 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3807
3808 PATTERN (insn) = pattern;
3809 INSN_CODE (insn) = -1;
fc92fa61 3810 REG_NOTES (insn) = NULL;
3811 JUMP_LABEL (insn) = NULL;
5169661d 3812 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3813 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3814
3815 return insn;
3816}
6e911104 3817
31d3e01c 3818/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3819
3820static rtx
35cb5232 3821make_call_insn_raw (rtx pattern)
6e911104 3822{
19cb6b50 3823 rtx insn;
6e911104 3824
3825 insn = rtx_alloc (CALL_INSN);
3826 INSN_UID (insn) = cur_insn_uid++;
3827
3828 PATTERN (insn) = pattern;
3829 INSN_CODE (insn) = -1;
6e911104 3830 REG_NOTES (insn) = NULL;
3831 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5169661d 3832 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3833 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3834
3835 return insn;
3836}
35f3420b 3837
3838/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3839
3840static rtx
3841make_note_raw (enum insn_note subtype)
3842{
3843 /* Some notes are never created this way at all. These notes are
3844 only created by patching out insns. */
3845 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3846 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3847
3848 rtx note = rtx_alloc (NOTE);
3849 INSN_UID (note) = cur_insn_uid++;
3850 NOTE_KIND (note) = subtype;
3851 BLOCK_FOR_INSN (note) = NULL;
3852 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3853 return note;
3854}
15bbde2b 3855\f
35f3420b 3856/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3857 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3858 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3859
3860static inline void
3861link_insn_into_chain (rtx insn, rtx prev, rtx next)
3862{
3863 PREV_INSN (insn) = prev;
3864 NEXT_INSN (insn) = next;
3865 if (prev != NULL)
3866 {
3867 NEXT_INSN (prev) = insn;
3868 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3869 {
3870 rtx sequence = PATTERN (prev);
3871 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3872 }
3873 }
3874 if (next != NULL)
3875 {
3876 PREV_INSN (next) = insn;
3877 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3878 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3879 }
34f5b9ac 3880
3881 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3882 {
3883 rtx sequence = PATTERN (insn);
3884 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3885 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3886 }
35f3420b 3887}
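
/* Illustration (added sketch): linking I between P and N rewrites

	P <-> N      into      P <-> I <-> N,

   and when P or N is a SEQUENCE (e.g. a filled delay slot), the
   NEXT_INSN of the sequence's last member and the PREV_INSN of its
   first member are patched as well, so walks that enter the slot see
   a consistent chain.  */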
3888
15bbde2b 3889/* Add INSN to the end of the doubly-linked list.
3890 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3891
3892void
35cb5232 3893add_insn (rtx insn)
15bbde2b 3894{
35f3420b 3895 rtx prev = get_last_insn ();
3896 link_insn_into_chain (insn, prev, NULL);
06f9d6ef 3897 if (NULL == get_insns ())
3898 set_first_insn (insn);
06f9d6ef 3899 set_last_insn (insn);
15bbde2b 3900}
3901
35f3420b 3902/* Add INSN into the doubly-linked list after insn AFTER. */
15bbde2b 3903
35f3420b 3904static void
3905add_insn_after_nobb (rtx insn, rtx after)
15bbde2b 3906{
3907 rtx next = NEXT_INSN (after);
3908
611234b4 3909 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3910
35f3420b 3911 link_insn_into_chain (insn, after, next);
15bbde2b 3912
35f3420b 3913 if (next == NULL)
15bbde2b 3914 {
35f3420b 3915 if (get_last_insn () == after)
3916 set_last_insn (insn);
3917 else
3918 {
3919 struct sequence_stack *stack = seq_stack;
3920 /* Scan all pending sequences too. */
3921 for (; stack; stack = stack->next)
3922 if (after == stack->last)
3923 {
3924 stack->last = insn;
3925 break;
3926 }
3927 }
15bbde2b 3928 }
35f3420b 3929}
3930
3931/* Add INSN into the doubly-linked list before insn BEFORE. */
3932
3933static void
3934add_insn_before_nobb (rtx insn, rtx before)
3935{
3936 rtx prev = PREV_INSN (before);
3937
3938 gcc_assert (!optimize || !INSN_DELETED_P (before));
3939
3940 link_insn_into_chain (insn, prev, before);
3941
3942 if (prev == NULL)
15bbde2b 3943 {
35f3420b 3944 if (get_insns () == before)
3945 set_first_insn (insn);
3946 else
3947 {
3948 struct sequence_stack *stack = seq_stack;
3949 /* Scan all pending sequences too. */
3950 for (; stack; stack = stack->next)
3951 if (before == stack->first)
3952 {
3953 stack->first = insn;
3954 break;
3955 }
312de84d 3956
35f3420b 3957 gcc_assert (stack);
3958 }
15bbde2b 3959 }
35f3420b 3960}
3961
3962/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
 3963   If BB is NULL, an attempt is made to infer the bb from AFTER.
3964
3965 This and the next function should be the only functions called
3966 to insert an insn once delay slots have been filled since only
3967 they know how to update a SEQUENCE. */
15bbde2b 3968
35f3420b 3969void
3970add_insn_after (rtx insn, rtx after, basic_block bb)
3971{
3972 add_insn_after_nobb (insn, after);
6d7dc5b9 3973 if (!BARRIER_P (after)
3974 && !BARRIER_P (insn)
9dda7915 3975 && (bb = BLOCK_FOR_INSN (after)))
3976 {
3977 set_block_for_insn (insn, bb);
308f9b79 3978 if (INSN_P (insn))
3072d30e 3979 df_insn_rescan (insn);
9dda7915 3980      /* This should not happen, as the first insn in a BB is always
3fb1e43b 3981	 either a NOTE or a LABEL. */
5496dbfc 3982 if (BB_END (bb) == after
9dda7915 3983 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3984 && !BARRIER_P (insn)
ad4583d9 3985 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3986 BB_END (bb) = insn;
9dda7915 3987 }
15bbde2b 3988}
3989
35f3420b 3990/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
 3991   If BB is NULL, an attempt is made to infer the bb from BEFORE.
3992
3993 This and the previous function should be the only functions called
3994 to insert an insn once delay slots have been filled since only
3995 they know how to update a SEQUENCE. */
312de84d 3996
3997void
3072d30e 3998add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 3999{
35f3420b 4000 add_insn_before_nobb (insn, before);
312de84d 4001
48e1416a 4002 if (!bb
3072d30e 4003 && !BARRIER_P (before)
4004 && !BARRIER_P (insn))
4005 bb = BLOCK_FOR_INSN (before);
4006
4007 if (bb)
9dda7915 4008 {
4009 set_block_for_insn (insn, bb);
308f9b79 4010 if (INSN_P (insn))
3072d30e 4011 df_insn_rescan (insn);
611234b4 4012      /* This should not happen, as the first insn in a BB is always either
ba821eb1 4013	 a NOTE or a LABEL. */
611234b4 4014 gcc_assert (BB_HEAD (bb) != insn
4015 /* Avoid clobbering of structure when creating new BB. */
4016 || BARRIER_P (insn)
ad4583d9 4017 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 4018 }
312de84d 4019}
4020
3072d30e 4021/* Replace INSN with a deleted instruction note. */
4022
fc3d1695 4023void
4024set_insn_deleted (rtx insn)
3072d30e 4025{
91f71fa3 4026 if (INSN_P (insn))
b983ea33 4027 df_insn_delete (insn);
3072d30e 4028 PUT_CODE (insn, NOTE);
4029 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4030}
4031
4032
93ff53d3 4033/* Unlink INSN from the insn chain.
4034
4035 This function knows how to handle sequences.
4036
4037 This function does not invalidate data flow information associated with
4038 INSN (i.e. does not call df_insn_delete). That makes this function
 4039   usable for only disconnecting an insn from the chain, and re-emitting it
4040 elsewhere later.
4041
4042 To later insert INSN elsewhere in the insn chain via add_insn and
4043 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4044 the caller. Nullifying them here breaks many insn chain walks.
4045
4046 To really delete an insn and related DF information, use delete_insn. */
4047
7ddcf2bf 4048void
35cb5232 4049remove_insn (rtx insn)
7ddcf2bf 4050{
4051 rtx next = NEXT_INSN (insn);
4052 rtx prev = PREV_INSN (insn);
e4bf866d 4053 basic_block bb;
4054
7ddcf2bf 4055 if (prev)
4056 {
4057 NEXT_INSN (prev) = next;
6d7dc5b9 4058 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 4059 {
4060 rtx sequence = PATTERN (prev);
4061 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4062 }
4063 }
06f9d6ef 4064 else if (get_insns () == insn)
4065 {
c8f0c143 4066 if (next)
4067 PREV_INSN (next) = NULL;
06f9d6ef 4068 set_first_insn (next);
4069 }
7ddcf2bf 4070 else
4071 {
0a893c29 4072 struct sequence_stack *stack = seq_stack;
7ddcf2bf 4073 /* Scan all pending sequences too. */
4074 for (; stack; stack = stack->next)
4075 if (insn == stack->first)
4076 {
4077 stack->first = next;
4078 break;
4079 }
4080
611234b4 4081 gcc_assert (stack);
7ddcf2bf 4082 }
4083
4084 if (next)
4085 {
4086 PREV_INSN (next) = prev;
6d7dc5b9 4087 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 4088 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4089 }
06f9d6ef 4090 else if (get_last_insn () == insn)
4091 set_last_insn (prev);
7ddcf2bf 4092 else
4093 {
0a893c29 4094 struct sequence_stack *stack = seq_stack;
7ddcf2bf 4095 /* Scan all pending sequences too. */
4096 for (; stack; stack = stack->next)
4097 if (insn == stack->last)
4098 {
4099 stack->last = prev;
4100 break;
4101 }
4102
611234b4 4103 gcc_assert (stack);
7ddcf2bf 4104 }
b983ea33 4105
b983ea33 4106 /* Fix up basic block boundaries, if necessary. */
6d7dc5b9 4107 if (!BARRIER_P (insn)
e4bf866d 4108 && (bb = BLOCK_FOR_INSN (insn)))
4109 {
5496dbfc 4110 if (BB_HEAD (bb) == insn)
e4bf866d 4111 {
f4aee538 4112 /* Never ever delete the basic block note without deleting whole
4113 basic block. */
611234b4 4114 gcc_assert (!NOTE_P (insn));
5496dbfc 4115 BB_HEAD (bb) = next;
e4bf866d 4116 }
5496dbfc 4117 if (BB_END (bb) == insn)
4118 BB_END (bb) = prev;
e4bf866d 4119 }
7ddcf2bf 4120}
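
/* Sketch of the contract documented above (hypothetical caller;
   NEW_PLACE is a placeholder): to move INSN elsewhere without
   destroying its DF information, unlink it, nullify its chain
   pointers, and re-add it:

	remove_insn (insn);
	PREV_INSN (insn) = NULL_RTX;
	NEXT_INSN (insn) = NULL_RTX;
	add_insn_after (insn, new_place, NULL);

   Passing a NULL basic block lets add_insn_after infer it from
   NEW_PLACE.  */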
4121
d5f9786f 4122/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4123
4124void
35cb5232 4125add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 4126{
611234b4 4127 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 4128
4129 /* Put the register usage information on the CALL. If there is already
4130 some usage information, put ours at the end. */
4131 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4132 {
4133 rtx link;
4134
4135 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4136 link = XEXP (link, 1))
4137 ;
4138
4139 XEXP (link, 1) = call_fusage;
4140 }
4141 else
4142 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4143}
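
/* Usage sketch (hypothetical, mirroring callers in calls.c): build a
   CALL_INSN_FUNCTION_USAGE list with use_reg and attach it to a newly
   emitted call; HARD_REG and CALL_INSN stand for the caller's values:

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, hard_reg);
	add_function_usage_to (call_insn, call_fusage);  */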
4144
15bbde2b 4145/* Delete all insns made since FROM.
4146 FROM becomes the new last instruction. */
4147
4148void
35cb5232 4149delete_insns_since (rtx from)
15bbde2b 4150{
4151 if (from == 0)
06f9d6ef 4152 set_first_insn (0);
15bbde2b 4153 else
4154 NEXT_INSN (from) = 0;
06f9d6ef 4155 set_last_insn (from);
15bbde2b 4156}
4157
34e2ddcd 4158/* This function is deprecated; please use sequences instead.
4159
4160 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 4161 The insns to be moved are those between FROM and TO.
4162 They are moved to a new position after the insn AFTER.
4163 AFTER must not be FROM or TO or any insn in between.
4164
4165 This function does not know about SEQUENCEs and hence should not be
4166 called after delay-slot filling has been done. */
4167
4168void
35cb5232 4169reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 4170{
7f6ca11f 4171#ifdef ENABLE_CHECKING
4172 rtx x;
4173 for (x = from; x != to; x = NEXT_INSN (x))
4174 gcc_assert (after != x);
4175 gcc_assert (after != to);
4176#endif
4177
15bbde2b 4178 /* Splice this bunch out of where it is now. */
4179 if (PREV_INSN (from))
4180 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4181 if (NEXT_INSN (to))
4182 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4183 if (get_last_insn () == to)
4184 set_last_insn (PREV_INSN (from));
4185 if (get_insns () == from)
4186 set_first_insn (NEXT_INSN (to));
15bbde2b 4187
4188 /* Make the new neighbors point to it and it to them. */
4189 if (NEXT_INSN (after))
4190 PREV_INSN (NEXT_INSN (after)) = to;
4191
4192 NEXT_INSN (to) = NEXT_INSN (after);
4193 PREV_INSN (from) = after;
4194 NEXT_INSN (after) = from;
9af5ce0c 4195 if (after == get_last_insn ())
06f9d6ef 4196 set_last_insn (to);
15bbde2b 4197}
4198
9dda7915 4199/* Same as function above, but take care to update BB boundaries. */
4200void
35cb5232 4201reorder_insns (rtx from, rtx to, rtx after)
9dda7915 4202{
4203 rtx prev = PREV_INSN (from);
4204 basic_block bb, bb2;
4205
4206 reorder_insns_nobb (from, to, after);
4207
6d7dc5b9 4208 if (!BARRIER_P (after)
9dda7915 4209 && (bb = BLOCK_FOR_INSN (after)))
4210 {
4211 rtx x;
3072d30e 4212 df_set_bb_dirty (bb);
d4c5e26d 4213
6d7dc5b9 4214 if (!BARRIER_P (from)
9dda7915 4215 && (bb2 = BLOCK_FOR_INSN (from)))
4216 {
5496dbfc 4217 if (BB_END (bb2) == to)
4218 BB_END (bb2) = prev;
3072d30e 4219 df_set_bb_dirty (bb2);
9dda7915 4220 }
4221
5496dbfc 4222 if (BB_END (bb) == after)
4223 BB_END (bb) = to;
9dda7915 4224
4225 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4226 if (!BARRIER_P (x))
a2bdd643 4227 df_insn_change_bb (x, bb);
9dda7915 4228 }
4229}
4230
15bbde2b 4231\f
31d3e01c 4232/* Emit insn(s) of given code and pattern
4233 at a specified place within the doubly-linked list.
15bbde2b 4234
31d3e01c 4235 All of the emit_foo global entry points accept an object
4236 X which is either an insn list or a PATTERN of a single
4237 instruction.
15bbde2b 4238
31d3e01c 4239 There are thus a few canonical ways to generate code and
4240 emit it at a specific place in the instruction stream. For
4241 example, consider the instruction named SPOT and the fact that
4242 we would like to emit some instructions before SPOT. We might
4243 do it like this:
15bbde2b 4244
31d3e01c 4245 start_sequence ();
4246 ... emit the new instructions ...
4247 insns_head = get_insns ();
4248 end_sequence ();
15bbde2b 4249
31d3e01c 4250 emit_insn_before (insns_head, SPOT);
15bbde2b 4251
31d3e01c 4252 It used to be common to generate SEQUENCE rtl instead, but that
4253 is a relic of the past which no longer occurs. The reason is that
4254 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4255 generated would almost certainly die right after it was created. */
15bbde2b 4256
5f7c5ddd 4257static rtx
4258emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4259 rtx (*make_raw) (rtx))
15bbde2b 4260{
19cb6b50 4261 rtx insn;
15bbde2b 4262
611234b4 4263 gcc_assert (before);
31d3e01c 4264
4265 if (x == NULL_RTX)
4266 return last;
4267
4268 switch (GET_CODE (x))
15bbde2b 4269 {
9845d120 4270 case DEBUG_INSN:
31d3e01c 4271 case INSN:
4272 case JUMP_INSN:
4273 case CALL_INSN:
4274 case CODE_LABEL:
4275 case BARRIER:
4276 case NOTE:
4277 insn = x;
4278 while (insn)
4279 {
4280 rtx next = NEXT_INSN (insn);
3072d30e 4281 add_insn_before (insn, before, bb);
31d3e01c 4282 last = insn;
4283 insn = next;
4284 }
4285 break;
4286
4287#ifdef ENABLE_RTL_CHECKING
4288 case SEQUENCE:
611234b4 4289 gcc_unreachable ();
31d3e01c 4290 break;
4291#endif
4292
4293 default:
5f7c5ddd 4294 last = (*make_raw) (x);
3072d30e 4295 add_insn_before (last, before, bb);
31d3e01c 4296 break;
15bbde2b 4297 }
4298
31d3e01c 4299 return last;
15bbde2b 4300}
4301
5f7c5ddd 4302/* Make X be output before the instruction BEFORE. */
4303
4304rtx
4305emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4306{
4307 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4308}
4309
31d3e01c 4310/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4311 and output it before the instruction BEFORE. */
4312
4313rtx
0891f67c 4314emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4315{
5f7c5ddd 4316 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4317 make_jump_insn_raw);
15bbde2b 4318}
4319
31d3e01c 4320/* Make an instruction with body X and code CALL_INSN
cd0fe062 4321 and output it before the instruction BEFORE. */
4322
4323rtx
0891f67c 4324emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4325{
5f7c5ddd 4326 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4327 make_call_insn_raw);
cd0fe062 4328}
4329
9845d120 4330/* Make an instruction with body X and code DEBUG_INSN
4331 and output it before the instruction BEFORE. */
4332
4333rtx
4334emit_debug_insn_before_noloc (rtx x, rtx before)
4335{
5f7c5ddd 4336 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4337 make_debug_insn_raw);
9845d120 4338}
4339
15bbde2b 4340/* Make an insn of code BARRIER
71caadc0 4341 and output it before the insn BEFORE. */
15bbde2b 4342
4343rtx
35cb5232 4344emit_barrier_before (rtx before)
15bbde2b 4345{
19cb6b50 4346 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4347
4348 INSN_UID (insn) = cur_insn_uid++;
4349
3072d30e 4350 add_insn_before (insn, before, NULL);
15bbde2b 4351 return insn;
4352}
4353
71caadc0 4354/* Emit the label LABEL before the insn BEFORE. */
4355
4356rtx
35cb5232 4357emit_label_before (rtx label, rtx before)
71caadc0 4358{
596ef494 4359 gcc_checking_assert (INSN_UID (label) == 0);
4360 INSN_UID (label) = cur_insn_uid++;
4361 add_insn_before (label, before, NULL);
71caadc0 4362 return label;
4363}
15bbde2b 4364\f
31d3e01c 4365/* Helper for emit_insn_after, handles lists of instructions
4366 efficiently. */
15bbde2b 4367
31d3e01c 4368static rtx
3072d30e 4369emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4370{
31d3e01c 4371 rtx last;
4372 rtx after_after;
3072d30e 4373 if (!bb && !BARRIER_P (after))
4374 bb = BLOCK_FOR_INSN (after);
15bbde2b 4375
3072d30e 4376 if (bb)
15bbde2b 4377 {
3072d30e 4378 df_set_bb_dirty (bb);
31d3e01c 4379 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4380 if (!BARRIER_P (last))
3072d30e 4381 {
4382 set_block_for_insn (last, bb);
4383 df_insn_rescan (last);
4384 }
6d7dc5b9 4385 if (!BARRIER_P (last))
3072d30e 4386 {
4387 set_block_for_insn (last, bb);
4388 df_insn_rescan (last);
4389 }
5496dbfc 4390 if (BB_END (bb) == after)
4391 BB_END (bb) = last;
15bbde2b 4392 }
4393 else
31d3e01c 4394 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4395 continue;
4396
4397 after_after = NEXT_INSN (after);
4398
4399 NEXT_INSN (after) = first;
4400 PREV_INSN (first) = after;
4401 NEXT_INSN (last) = after_after;
4402 if (after_after)
4403 PREV_INSN (after_after) = last;
4404
9af5ce0c 4405 if (after == get_last_insn ())
06f9d6ef 4406 set_last_insn (last);
e1ab7874 4407
31d3e01c 4408 return last;
4409}
4410
5f7c5ddd 4411static rtx
4412emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4413 rtx (*make_raw)(rtx))
31d3e01c 4414{
4415 rtx last = after;
4416
611234b4 4417 gcc_assert (after);
31d3e01c 4418
4419 if (x == NULL_RTX)
4420 return last;
4421
4422 switch (GET_CODE (x))
15bbde2b 4423 {
9845d120 4424 case DEBUG_INSN:
31d3e01c 4425 case INSN:
4426 case JUMP_INSN:
4427 case CALL_INSN:
4428 case CODE_LABEL:
4429 case BARRIER:
4430 case NOTE:
3072d30e 4431 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4432 break;
4433
4434#ifdef ENABLE_RTL_CHECKING
4435 case SEQUENCE:
611234b4 4436 gcc_unreachable ();
31d3e01c 4437 break;
4438#endif
4439
4440 default:
5f7c5ddd 4441 last = (*make_raw) (x);
3072d30e 4442 add_insn_after (last, after, bb);
31d3e01c 4443 break;
15bbde2b 4444 }
4445
31d3e01c 4446 return last;
15bbde2b 4447}
4448
5f7c5ddd 4449/* Make X be output after the insn AFTER and set the BB of insn. If
4450 BB is NULL, an attempt is made to infer the BB from AFTER. */
4451
4452rtx
4453emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4454{
4455 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4456}
4457
1bea98fb 4458
31d3e01c 4459/* Make an insn of code JUMP_INSN with body X
15bbde2b 4460 and output it after the insn AFTER. */
4461
4462rtx
0891f67c 4463emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4464{
5f7c5ddd 4465 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
31d3e01c 4466}
4467
4468/* Make an instruction with body X and code CALL_INSN
4469 and output it after the instruction AFTER. */
4470
4471rtx
0891f67c 4472emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4473{
5f7c5ddd 4474 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4475}
4476
9845d120 4477/* Make an instruction with body X and code DEBUG_INSN
4478 and output it after the instruction AFTER. */
4479
4480rtx
4481emit_debug_insn_after_noloc (rtx x, rtx after)
4482{
5f7c5ddd 4483 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4484}
4485
15bbde2b 4486/* Make an insn of code BARRIER
4487 and output it after the insn AFTER. */
4488
4489rtx
35cb5232 4490emit_barrier_after (rtx after)
15bbde2b 4491{
19cb6b50 4492 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4493
4494 INSN_UID (insn) = cur_insn_uid++;
4495
3072d30e 4496 add_insn_after (insn, after, NULL);
15bbde2b 4497 return insn;
4498}
4499
4500/* Emit the label LABEL after the insn AFTER. */
4501
4502rtx
35cb5232 4503emit_label_after (rtx label, rtx after)
15bbde2b 4504{
596ef494 4505 gcc_checking_assert (INSN_UID (label) == 0);
4506 INSN_UID (label) = cur_insn_uid++;
4507 add_insn_after (label, after, NULL);
15bbde2b 4508 return label;
4509}
35f3420b 4510\f
4511/* Notes require a bit of special handling: Some notes need to have their
4512 BLOCK_FOR_INSN set, others should never have it set, and some should
4513 have it set or clear depending on the context. */
4514
4515/* Return true iff a note of kind SUBTYPE should be emitted with routines
4516 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4517 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4518
4519static bool
4520note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4521{
4522 switch (subtype)
4523 {
4524 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4525 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4526 return true;
4527
4528 /* Notes for var tracking and EH region markers can appear between or
4529 inside basic blocks. If the caller is emitting on the basic block
4530 boundary, do not set BLOCK_FOR_INSN on the new note. */
4531 case NOTE_INSN_VAR_LOCATION:
4532 case NOTE_INSN_CALL_ARG_LOCATION:
4533 case NOTE_INSN_EH_REGION_BEG:
4534 case NOTE_INSN_EH_REGION_END:
4535 return on_bb_boundary_p;
4536
4537 /* Otherwise, BLOCK_FOR_INSN must be set. */
4538 default:
4539 return false;
4540 }
4541}
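
/* Illustration (added): under the rules above,

	emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));

   is a basic-block-boundary emission, so the new note's
   BLOCK_FOR_INSN stays clear, whereas the same call anchored at an
   insn in the middle of BB attaches the note to BB.  */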
15bbde2b 4542
4543/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4544
4545rtx
ad4583d9 4546emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4547{
35f3420b 4548 rtx note = make_note_raw (subtype);
4549 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4550 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4551
4552 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4553 add_insn_after_nobb (note, after);
4554 else
4555 add_insn_after (note, after, bb);
4556 return note;
4557}
4558
4559/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4560
4561rtx
4562emit_note_before (enum insn_note subtype, rtx before)
4563{
4564 rtx note = make_note_raw (subtype);
4565 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4566 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4567
4568 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4569 add_insn_before_nobb (note, before);
4570 else
4571 add_insn_before (note, before, bb);
15bbde2b 4572 return note;
4573}
15bbde2b 4574\f
ede4ebcb 4575/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4576 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4577
4578static rtx
4579emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4580 rtx (*make_raw) (rtx))
d321a68b 4581{
ede4ebcb 4582 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4583
0891f67c 4584 if (pattern == NULL_RTX || !loc)
ca154f3f 4585 return last;
4586
31d3e01c 4587 after = NEXT_INSN (after);
4588 while (1)
4589 {
5169661d 4590 if (active_insn_p (after) && !INSN_LOCATION (after))
4591 INSN_LOCATION (after) = loc;
31d3e01c 4592 if (after == last)
4593 break;
4594 after = NEXT_INSN (after);
4595 }
d321a68b 4596 return last;
4597}
4598
ede4ebcb 4599/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4600 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4601 any DEBUG_INSNs. */
4602
4603static rtx
4604emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4605 rtx (*make_raw) (rtx))
0891f67c 4606{
9845d120 4607 rtx prev = after;
4608
ede4ebcb 4609 if (skip_debug_insns)
4610 while (DEBUG_INSN_P (prev))
4611 prev = PREV_INSN (prev);
9845d120 4612
4613 if (INSN_P (prev))
5169661d 4614 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
ede4ebcb 4615 make_raw);
0891f67c 4616 else
ede4ebcb 4617 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4618}
4619
5169661d 4620/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
d321a68b 4621rtx
ede4ebcb 4622emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4623{
ede4ebcb 4624 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4625}
31d3e01c 4626
5169661d 4627/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
ede4ebcb 4628rtx
4629emit_insn_after (rtx pattern, rtx after)
4630{
4631 return emit_pattern_after (pattern, after, true, make_insn_raw);
4632}
ca154f3f 4633
5169661d 4634/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
ede4ebcb 4635rtx
4636emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4637{
4638 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
d321a68b 4639}
4640
5169661d 4641/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
0891f67c 4642rtx
4643emit_jump_insn_after (rtx pattern, rtx after)
4644{
ede4ebcb 4645 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
0891f67c 4646}
4647
5169661d 4648/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
d321a68b 4649rtx
35cb5232 4650emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4651{
ede4ebcb 4652 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4653}
4654
5169661d 4655/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
0891f67c 4656rtx
4657emit_call_insn_after (rtx pattern, rtx after)
4658{
ede4ebcb 4659 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4660}
4661
5169661d 4662/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
9845d120 4663rtx
4664emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4665{
ede4ebcb 4666 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4667}
4668
5169661d 4669/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
9845d120 4670rtx
4671emit_debug_insn_after (rtx pattern, rtx after)
4672{
ede4ebcb 4673 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4674}
4675
ede4ebcb 4676/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4677 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4678 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4679 CALL_INSN, etc. */
4680
4681static rtx
4682emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4683 rtx (*make_raw) (rtx))
d321a68b 4684{
4685 rtx first = PREV_INSN (before);
ede4ebcb 4686 rtx last = emit_pattern_before_noloc (pattern, before,
4687 insnp ? before : NULL_RTX,
4688 NULL, make_raw);
0891f67c 4689
4690 if (pattern == NULL_RTX || !loc)
4691 return last;
4692
4486418e 4693 if (!first)
4694 first = get_insns ();
4695 else
4696 first = NEXT_INSN (first);
0891f67c 4697 while (1)
4698 {
5169661d 4699 if (active_insn_p (first) && !INSN_LOCATION (first))
4700 INSN_LOCATION (first) = loc;
0891f67c 4701 if (first == last)
4702 break;
4703 first = NEXT_INSN (first);
4704 }
4705 return last;
4706}
4707
ede4ebcb 4708/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4709 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4710 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4711 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4712
4713static rtx
4714emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4715 bool insnp, rtx (*make_raw) (rtx))
0891f67c 4716{
9845d120 4717 rtx next = before;
4718
ede4ebcb 4719 if (skip_debug_insns)
4720 while (DEBUG_INSN_P (next))
4721 next = PREV_INSN (next);
9845d120 4722
4723 if (INSN_P (next))
5169661d 4724 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
ede4ebcb 4725 insnp, make_raw);
0891f67c 4726 else
ede4ebcb 4727 return emit_pattern_before_noloc (pattern, before,
4728 insnp ? before : NULL_RTX,
4729 NULL, make_raw);
0891f67c 4730}
4731
5169661d 4732/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
0891f67c 4733rtx
ede4ebcb 4734emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0891f67c 4735{
ede4ebcb 4736 return emit_pattern_before_setloc (pattern, before, loc, true,
4737 make_insn_raw);
4738}
0891f67c 4739
5169661d 4740/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
ede4ebcb 4741rtx
4742emit_insn_before (rtx pattern, rtx before)
4743{
4744 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4745}
0891f67c 4746
5169661d 4747/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
ede4ebcb 4748rtx
4749emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4750{
4751 return emit_pattern_before_setloc (pattern, before, loc, false,
4752 make_jump_insn_raw);
0891f67c 4753}
4754
5169661d 4755/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
0891f67c 4756rtx
4757emit_jump_insn_before (rtx pattern, rtx before)
4758{
ede4ebcb 4759 return emit_pattern_before (pattern, before, true, false,
4760 make_jump_insn_raw);
0891f67c 4761}
4762
5169661d 4763/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
0891f67c 4764rtx
4765emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4766{
ede4ebcb 4767 return emit_pattern_before_setloc (pattern, before, loc, false,
4768 make_call_insn_raw);
d321a68b 4769}
0891f67c 4770
ede4ebcb 4771/* Like emit_call_insn_before_noloc,
5169661d 4772   but set INSN_LOCATION according to BEFORE. */
0891f67c 4773rtx
4774emit_call_insn_before (rtx pattern, rtx before)
4775{
ede4ebcb 4776 return emit_pattern_before (pattern, before, true, false,
4777 make_call_insn_raw);
0891f67c 4778}
9845d120 4779
5169661d 4780/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
9845d120 4781rtx
4782emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4783{
ede4ebcb 4784 return emit_pattern_before_setloc (pattern, before, loc, false,
4785 make_debug_insn_raw);
9845d120 4786}
4787
ede4ebcb 4788/* Like emit_debug_insn_before_noloc,
5169661d 4789   but set INSN_LOCATION according to BEFORE. */
9845d120 4790rtx
4791emit_debug_insn_before (rtx pattern, rtx before)
4792{
ede4ebcb 4793 return emit_pattern_before (pattern, before, false, false,
4794 make_debug_insn_raw);
9845d120 4795}
d321a68b 4796\f
31d3e01c 4797/* Take X and emit it at the end of the doubly-linked
4798 INSN list.
15bbde2b 4799
4800 Returns the last insn emitted. */
4801
4802rtx
35cb5232 4803emit_insn (rtx x)
15bbde2b 4804{
9af5ce0c 4805 rtx last = get_last_insn ();
31d3e01c 4806 rtx insn;
15bbde2b 4807
31d3e01c 4808 if (x == NULL_RTX)
4809 return last;
15bbde2b 4810
31d3e01c 4811 switch (GET_CODE (x))
4812 {
9845d120 4813 case DEBUG_INSN:
31d3e01c 4814 case INSN:
4815 case JUMP_INSN:
4816 case CALL_INSN:
4817 case CODE_LABEL:
4818 case BARRIER:
4819 case NOTE:
4820 insn = x;
4821 while (insn)
15bbde2b 4822 {
31d3e01c 4823 rtx next = NEXT_INSN (insn);
15bbde2b 4824 add_insn (insn);
31d3e01c 4825 last = insn;
4826 insn = next;
15bbde2b 4827 }
31d3e01c 4828 break;
15bbde2b 4829
31d3e01c 4830#ifdef ENABLE_RTL_CHECKING
91f71fa3 4831 case JUMP_TABLE_DATA:
31d3e01c 4832 case SEQUENCE:
611234b4 4833 gcc_unreachable ();
31d3e01c 4834 break;
4835#endif
15bbde2b 4836
31d3e01c 4837 default:
4838 last = make_insn_raw (x);
4839 add_insn (last);
4840 break;
15bbde2b 4841 }
4842
4843 return last;
4844}
4845
9845d120 4846/* Make an insn of code DEBUG_INSN with pattern X
4847 and add it to the end of the doubly-linked list. */
4848
4849rtx
4850emit_debug_insn (rtx x)
4851{
9af5ce0c 4852 rtx last = get_last_insn ();
9845d120 4853 rtx insn;
4854
4855 if (x == NULL_RTX)
4856 return last;
4857
4858 switch (GET_CODE (x))
4859 {
4860 case DEBUG_INSN:
4861 case INSN:
4862 case JUMP_INSN:
4863 case CALL_INSN:
4864 case CODE_LABEL:
4865 case BARRIER:
4866 case NOTE:
4867 insn = x;
4868 while (insn)
4869 {
4870 rtx next = NEXT_INSN (insn);
4871 add_insn (insn);
4872 last = insn;
4873 insn = next;
4874 }
4875 break;
4876
4877#ifdef ENABLE_RTL_CHECKING
91f71fa3 4878 case JUMP_TABLE_DATA:
9845d120 4879 case SEQUENCE:
4880 gcc_unreachable ();
4881 break;
4882#endif
4883
4884 default:
4885 last = make_debug_insn_raw (x);
4886 add_insn (last);
4887 break;
4888 }
4889
4890 return last;
4891}
4892
31d3e01c 4893/* Make an insn of code JUMP_INSN with pattern X
4894 and add it to the end of the doubly-linked list. */
15bbde2b 4895
4896rtx
35cb5232 4897emit_jump_insn (rtx x)
15bbde2b 4898{
d90b3d04 4899 rtx last = NULL_RTX, insn;
15bbde2b 4900
31d3e01c 4901 switch (GET_CODE (x))
15bbde2b 4902 {
9845d120 4903 case DEBUG_INSN:
31d3e01c 4904 case INSN:
4905 case JUMP_INSN:
4906 case CALL_INSN:
4907 case CODE_LABEL:
4908 case BARRIER:
4909 case NOTE:
4910 insn = x;
4911 while (insn)
4912 {
4913 rtx next = NEXT_INSN (insn);
4914 add_insn (insn);
4915 last = insn;
4916 insn = next;
4917 }
4918 break;
b36b07d8 4919
31d3e01c 4920#ifdef ENABLE_RTL_CHECKING
91f71fa3 4921 case JUMP_TABLE_DATA:
31d3e01c 4922 case SEQUENCE:
611234b4 4923 gcc_unreachable ();
31d3e01c 4924 break;
4925#endif
b36b07d8 4926
31d3e01c 4927 default:
4928 last = make_jump_insn_raw (x);
4929 add_insn (last);
4930 break;
9dda7915 4931 }
b36b07d8 4932
4933 return last;
4934}
4935
31d3e01c 4936/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4937 and add it to the end of the doubly-linked list. */
4938
4939rtx
35cb5232 4940emit_call_insn (rtx x)
15bbde2b 4941{
31d3e01c 4942 rtx insn;
4943
4944 switch (GET_CODE (x))
15bbde2b 4945 {
9845d120 4946 case DEBUG_INSN:
31d3e01c 4947 case INSN:
4948 case JUMP_INSN:
4949 case CALL_INSN:
4950 case CODE_LABEL:
4951 case BARRIER:
4952 case NOTE:
4953 insn = emit_insn (x);
4954 break;
15bbde2b 4955
31d3e01c 4956#ifdef ENABLE_RTL_CHECKING
4957 case SEQUENCE:
91f71fa3 4958 case JUMP_TABLE_DATA:
611234b4 4959 gcc_unreachable ();
31d3e01c 4960 break;
4961#endif
15bbde2b 4962
31d3e01c 4963 default:
4964 insn = make_call_insn_raw (x);
15bbde2b 4965 add_insn (insn);
31d3e01c 4966 break;
15bbde2b 4967 }
31d3e01c 4968
4969 return insn;
15bbde2b 4970}
4971
4972/* Add the label LABEL to the end of the doubly-linked list. */
4973
4974rtx
35cb5232 4975emit_label (rtx label)
15bbde2b 4976{
596ef494 4977 gcc_checking_assert (INSN_UID (label) == 0);
4978 INSN_UID (label) = cur_insn_uid++;
4979 add_insn (label);
15bbde2b 4980 return label;
4981}
4982
91f71fa3 4983/* Make an insn of code JUMP_TABLE_DATA
4984 and add it to the end of the doubly-linked list. */
4985
4986rtx
4987emit_jump_table_data (rtx table)
4988{
4989 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4990 INSN_UID (jump_table_data) = cur_insn_uid++;
4991 PATTERN (jump_table_data) = table;
4992 BLOCK_FOR_INSN (jump_table_data) = NULL;
4993 add_insn (jump_table_data);
4994 return jump_table_data;
4995}
4996
15bbde2b 4997/* Make an insn of code BARRIER
4998 and add it to the end of the doubly-linked list. */
4999
5000rtx
35cb5232 5001emit_barrier (void)
15bbde2b 5002{
19cb6b50 5003 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 5004 INSN_UID (barrier) = cur_insn_uid++;
5005 add_insn (barrier);
5006 return barrier;
5007}
5008
2f57e3d9 5009/* Emit a copy of note ORIG. */
35cb5232 5010
2f57e3d9 5011rtx
5012emit_note_copy (rtx orig)
5013{
35f3420b 5014 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5015 rtx note = make_note_raw (kind);
2f57e3d9 5016 NOTE_DATA (note) = NOTE_DATA (orig);
2f57e3d9 5017 add_insn (note);
31b97e8f 5018 return note;
15bbde2b 5019}
5020
31b97e8f 5021/* Make an insn of code NOTE with kind KIND
 5022   and add it to the end of the doubly-linked list. */
15bbde2b 5023
5024rtx
ad4583d9 5025emit_note (enum insn_note kind)
15bbde2b 5026{
35f3420b 5027 rtx note = make_note_raw (kind);
15bbde2b 5028 add_insn (note);
5029 return note;
5030}
5031
18b42941 5032/* Emit a clobber of lvalue X. */
5033
5034rtx
5035emit_clobber (rtx x)
5036{
5037 /* CONCATs should not appear in the insn stream. */
5038 if (GET_CODE (x) == CONCAT)
5039 {
5040 emit_clobber (XEXP (x, 0));
5041 return emit_clobber (XEXP (x, 1));
5042 }
5043 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5044}
5045
5046/* Return a sequence of insns to clobber lvalue X. */
5047
5048rtx
5049gen_clobber (rtx x)
5050{
5051 rtx seq;
5052
5053 start_sequence ();
5054 emit_clobber (x);
5055 seq = get_insns ();
5056 end_sequence ();
5057 return seq;
5058}
5059
5060/* Emit a use of rvalue X. */
5061
5062rtx
5063emit_use (rtx x)
5064{
5065 /* CONCATs should not appear in the insn stream. */
5066 if (GET_CODE (x) == CONCAT)
5067 {
5068 emit_use (XEXP (x, 0));
5069 return emit_use (XEXP (x, 1));
5070 }
5071 return emit_insn (gen_rtx_USE (VOIDmode, x));
5072}
5073
5074/* Return a sequence of insns to use rvalue X. */
5075
5076rtx
5077gen_use (rtx x)
5078{
5079 rtx seq;
5080
5081 start_sequence ();
5082 emit_use (x);
5083 seq = get_insns ();
5084 end_sequence ();
5085 return seq;
5086}
5087
f1934a33 5088/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5089 note of this type already exists, remove it first. */
f1934a33 5090
c080d8f0 5091rtx
35cb5232 5092set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5093{
5094 rtx note = find_reg_note (insn, kind, NULL_RTX);
5095
7e6224ab 5096 switch (kind)
5097 {
5098 case REG_EQUAL:
5099 case REG_EQUIV:
5100 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5101 has multiple sets (some callers assume single_set
5102 means the insn only has one set, when in fact it
5103 means the insn only has one * useful * set). */
5104 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5105 {
611234b4 5106 gcc_assert (!note);
7e6224ab 5107 return NULL_RTX;
5108 }
5109
5110 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5111 It serves no useful purpose and breaks eliminate_regs. */
5112 if (GET_CODE (datum) == ASM_OPERANDS)
5113 return NULL_RTX;
3072d30e 5114
5115 if (note)
5116 {
5117 XEXP (note, 0) = datum;
5118 df_notes_rescan (insn);
5119 return note;
5120 }
7e6224ab 5121 break;
5122
5123 default:
3072d30e 5124 if (note)
5125 {
5126 XEXP (note, 0) = datum;
5127 return note;
5128 }
7e6224ab 5129 break;
5130 }
c080d8f0 5131
a1ddb869 5132 add_reg_note (insn, kind, datum);
3072d30e 5133
5134 switch (kind)
c080d8f0 5135 {
3072d30e 5136 case REG_EQUAL:
5137 case REG_EQUIV:
5138 df_notes_rescan (insn);
5139 break;
5140 default:
5141 break;
c080d8f0 5142 }
f1934a33 5143
c080d8f0 5144 return REG_NOTES (insn);
f1934a33 5145}
41cf444a 5146
5147/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5148rtx
5149set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5150{
5151 rtx set = single_set (insn);
5152
5153 if (set && SET_DEST (set) == dst)
5154 return set_unique_reg_note (insn, kind, datum);
5155 return NULL_RTX;
5156}
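
/* Usage sketch (hypothetical caller): record that the value set by
   INSN is known to equal a constant, e.g. after expanding a libcall
   whose result landed in TARGET:

	set_dst_reg_note (insn, REG_EQUAL, GEN_INT (42), target);

   The note is added only if single_set (insn) exists and its SET_DEST
   is TARGET; otherwise NULL_RTX is returned.  */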
15bbde2b 5157\f
5158/* Return an indication of which type of insn should have X as a body.
5159 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5160
9b69f75b 5161static enum rtx_code
35cb5232 5162classify_insn (rtx x)
15bbde2b 5163{
6d7dc5b9 5164 if (LABEL_P (x))
15bbde2b 5165 return CODE_LABEL;
5166 if (GET_CODE (x) == CALL)
5167 return CALL_INSN;
9cb2517e 5168 if (ANY_RETURN_P (x))
15bbde2b 5169 return JUMP_INSN;
5170 if (GET_CODE (x) == SET)
5171 {
5172 if (SET_DEST (x) == pc_rtx)
5173 return JUMP_INSN;
5174 else if (GET_CODE (SET_SRC (x)) == CALL)
5175 return CALL_INSN;
5176 else
5177 return INSN;
5178 }
5179 if (GET_CODE (x) == PARALLEL)
5180 {
19cb6b50 5181 int j;
15bbde2b 5182 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5183 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5184 return CALL_INSN;
5185 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5186 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5187 return JUMP_INSN;
5188 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5189 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5190 return CALL_INSN;
5191 }
5192 return INSN;
5193}
5194
5195/* Emit the rtl pattern X as an appropriate kind of insn.
5196 If X is a label, it is simply added into the insn chain. */
5197
5198rtx
35cb5232 5199emit (rtx x)
15bbde2b 5200{
5201 enum rtx_code code = classify_insn (x);
5202
611234b4 5203 switch (code)
15bbde2b 5204 {
611234b4 5205 case CODE_LABEL:
5206 return emit_label (x);
5207 case INSN:
5208 return emit_insn (x);
5209 case JUMP_INSN:
5210 {
5211 rtx insn = emit_jump_insn (x);
5212 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5213 return emit_barrier ();
5214 return insn;
5215 }
5216 case CALL_INSN:
5217 return emit_call_insn (x);
9845d120 5218 case DEBUG_INSN:
5219 return emit_debug_insn (x);
611234b4 5220 default:
5221 gcc_unreachable ();
15bbde2b 5222 }
15bbde2b 5223}
5224\f
1f3233d1 5225/* Space for free sequence stack entries. */
7035b2ab 5226static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5227
735f4358 5228/* Begin emitting insns to a sequence. If this sequence will contain
5229 something that might cause the compiler to pop arguments to function
5230 calls (because those pops have previously been deferred; see
5231 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5232 before calling this function. That will ensure that the deferred
5233 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5234
5235void
35cb5232 5236start_sequence (void)
15bbde2b 5237{
5238 struct sequence_stack *tem;
5239
1f3233d1 5240 if (free_sequence_stack != NULL)
5241 {
5242 tem = free_sequence_stack;
5243 free_sequence_stack = tem->next;
5244 }
5245 else
25a27413 5246 tem = ggc_alloc<sequence_stack> ();
15bbde2b 5247
0a893c29 5248 tem->next = seq_stack;
06f9d6ef 5249 tem->first = get_insns ();
5250 tem->last = get_last_insn ();
15bbde2b 5251
0a893c29 5252 seq_stack = tem;
15bbde2b 5253
06f9d6ef 5254 set_first_insn (0);
5255 set_last_insn (0);
15bbde2b 5256}
5257
b49854c6 5258/* Set up the insn chain starting with FIRST as the current sequence,
5259 saving the previously current one. See the documentation for
5260 start_sequence for more information about how to use this function. */
15bbde2b 5261
5262void
35cb5232 5263push_to_sequence (rtx first)
15bbde2b 5264{
5265 rtx last;
5266
5267 start_sequence ();
5268
3c802a1e 5269 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5270 ;
15bbde2b 5271
06f9d6ef 5272 set_first_insn (first);
5273 set_last_insn (last);
15bbde2b 5274}
5275
28bf151d 5276/* Like push_to_sequence, but take the last insn as an argument to avoid
5277 looping through the list. */
5278
5279void
5280push_to_sequence2 (rtx first, rtx last)
5281{
5282 start_sequence ();
5283
06f9d6ef 5284 set_first_insn (first);
5285 set_last_insn (last);
28bf151d 5286}
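
/* Sketch (hypothetical caller): when both ends of a detached insn
   list are already known, push_to_sequence2 avoids the list walk that
   push_to_sequence performs to find the last insn:

	push_to_sequence2 (saved_first, saved_last);
	emit_insn (pat);
	saved_first = get_insns ();
	saved_last = get_last_insn ();
	end_sequence ();

   SAVED_FIRST, SAVED_LAST and PAT are placeholders for state the
   caller maintains between emissions.  */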
5287
ab74c92f 5288/* Set up the outer-level insn chain
5289 as the current sequence, saving the previously current one. */
5290
5291void
35cb5232 5292push_topmost_sequence (void)
ab74c92f 5293{
2041cfd9 5294 struct sequence_stack *stack, *top = NULL;
ab74c92f 5295
5296 start_sequence ();
5297
0a893c29 5298 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5299 top = stack;
5300
06f9d6ef 5301 set_first_insn (top->first);
5302 set_last_insn (top->last);
ab74c92f 5303}
5304
5305/* After emitting to the outer-level insn chain, update the outer-level
5306 insn chain, and restore the previous saved state. */
5307
5308void
35cb5232 5309pop_topmost_sequence (void)
ab74c92f 5310{
2041cfd9 5311 struct sequence_stack *stack, *top = NULL;
ab74c92f 5312
0a893c29 5313 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5314 top = stack;
5315
06f9d6ef 5316 top->first = get_insns ();
5317 top->last = get_last_insn ();
ab74c92f 5318
5319 end_sequence ();
5320}
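
/* Sketch (hypothetical): emitting into the function's outermost insn
   chain while a nested sequence is active, e.g. for instrumentation
   near the function entry; PAT is a placeholder:

	push_topmost_sequence ();
	emit_insn_after (pat, get_insns ());
	pop_topmost_sequence ();

   Inside the bracketed region, get_insns () returns the first insn of
   the outermost chain, because push_topmost_sequence made that chain
   current.  */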

/* After emitting to a sequence, restore the previously saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
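
/* As the comment above copy_insn_1 notes, a caller that must keep the
   SCRATCH correspondence alive between a pattern and its notes can copy
   the pattern first and the notes afterwards (`insn' and `link' are
   hypothetical locals; `link' is one of the insn's REG_NOTES):

	pat = copy_insn (PATTERN (insn));
	note = copy_insn_1 (XEXP (link, 0));

   The second call reuses the SCRATCH map set up by the first, so no
   other call to copy_insn may intervene.  */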

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx
copy_delay_slot_insn (rtx insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = copy_rtx (insn);
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
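
/* Because of the collapsing above, building an all-zeros vector yields
   the shared constant rather than a fresh rtx; e.g. on a target that
   provides V4SImode (`v', `i' and `x' are hypothetical locals):

	v = rtvec_alloc (4);
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   and x is then CONST0_RTX (V4SImode), pointer-identical to every other
   zero vector of that mode.  */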

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
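
/* On a typical 32-bit target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 32, the loop above selects byte_mode == QImode and
   word_mode == SImode; ptr_mode then usually matches Pmode, whenever
   POINTER_SIZE equals the width of Pmode and Pmode is a plain
   MODE_INT mode.  */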

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
					 const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER of hard register REGNO in mode MODE, creating
   and caching it on first use.  */
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
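
/* Since the table above hands back one shared rtx per (mode, regno)
   pair, a backend can emit clobbers of a fixed hard register cheaply,
   e.g. (with FLAGS_REGNUM standing for some target-defined hard
   register number):

	emit_insn (gen_hard_reg_clobber (CCmode, FLAGS_REGNUM));

   The returned CLOBBER is shared and must never be modified in place.  */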

location_t prologue_location;
location_t epilogue_location;

/* Hold the current and last location information, so that the location
   data structures are built lazily, only when instructions at a given
   place are actually needed.  */
static location_t curr_location;

/* Allocate the insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block to which INSN belongs.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
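
/* A target expanding, say, an atomic store can use this predicate to
   bracket the access with the fences its memory model requires
   (`model', `mem' and `val' are hypothetical locals):

	if (need_atomic_barrier_p (model, true))
	  expand_mem_thread_fence (model);
	emit_move_insn (mem, val);
	if (need_atomic_barrier_p (model, false))
	  expand_mem_thread_fence (model);

   expand_mem_thread_fence is the fence expander from optabs.c.  */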
\f
#include "gt-emit-rtl.h"