gcc/emit-rtl.c
bccafa26 1/* Emit RTL for the GCC expander.
711789cc 2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
15bbde2b 3
f12b58b3 4This file is part of GCC.
15bbde2b 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
15bbde2b 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15bbde2b 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
15bbde2b 19
20
21/* Middle-to-low level generation of rtx code and insns.
22
74efa612 23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
15bbde2b 25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
74efa612 28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
8fd5918e 31 dependent is the kind of rtx's they make and what arguments they
32 use. */
15bbde2b 33
34#include "config.h"
405711de 35#include "system.h"
805e22b2 36#include "coretypes.h"
37#include "tm.h"
0b205f4c 38#include "diagnostic-core.h"
15bbde2b 39#include "rtl.h"
3fd7e17f 40#include "tree.h"
9ed99284 41#include "varasm.h"
42#include "gimple.h"
7953c610 43#include "tm_p.h"
15bbde2b 44#include "flags.h"
45#include "function.h"
9ed99284 46#include "stringpool.h"
15bbde2b 47#include "expr.h"
48#include "regs.h"
c6b6c51f 49#include "hard-reg-set.h"
73f5c1e3 50#include "hashtab.h"
15bbde2b 51#include "insn-config.h"
0dbd1c74 52#include "recog.h"
a3426c4c 53#include "bitmap.h"
f3d96a58 54#include "basic-block.h"
a7b0c170 55#include "ggc.h"
b29760a8 56#include "debug.h"
b0278d39 57#include "langhooks.h"
3072d30e 58#include "df.h"
9845d120 59#include "params.h"
98155838 60#include "target.h"
4e57e76d 61#include "tree-eh.h"
649d8da6 62
679bcc8d 63struct target_rtl default_target_rtl;
64#if SWITCHABLE_TARGET
65struct target_rtl *this_target_rtl = &default_target_rtl;
66#endif
67
68#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
69
399d45d3 70/* Commonly used modes. */
71
a92771b8 72enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
73enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 74enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 75enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 76
b079a207 77/* Datastructures maintained for currently processed function in RTL form. */
78
fd6ffb7c 79struct rtl_data x_rtl;
b079a207 80
81/* Indexed by pseudo register number, gives the rtx for that pseudo.
48e1416a 82 Allocated in parallel with regno_pointer_align.
b079a207 83 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
 84 with a length attribute nested in top-level structures. */
85
86rtx * regno_reg_rtx;
15bbde2b 87
88/* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
9105005a 91static GTY(()) int label_num = 1;
15bbde2b 92
15bbde2b 93/* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
ba8dfb08 95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
15bbde2b 97
ba8dfb08 98rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
15bbde2b 99
1a60f06a 100rtx const_true_rtx;
101
15bbde2b 102REAL_VALUE_TYPE dconst0;
103REAL_VALUE_TYPE dconst1;
104REAL_VALUE_TYPE dconst2;
105REAL_VALUE_TYPE dconstm1;
77e89269 106REAL_VALUE_TYPE dconsthalf;
15bbde2b 107
06f0b99c 108/* Record fixed-point constants 0 and 1. */
109FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
15bbde2b 112/* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
57c097d5 117rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 118
7d7b0bac 119/* Standard pieces of rtx, to be substituted directly into things. */
120rtx pc_rtx;
121rtx ret_rtx;
122rtx simple_return_rtx;
123rtx cc0_rtx;
124
73f5c1e3 125/* A hash table storing CONST_INTs whose absolute value is greater
126 than MAX_SAVED_CONST_INT. */
127
1f3233d1 128static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
129 htab_t const_int_htab;
73f5c1e3 130
e913b5cd 131static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
132 htab_t const_wide_int_htab;
133
c6259b83 134/* A hash table storing memory attribute structures. */
1f3233d1 135static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
136 htab_t mem_attrs_htab;
c6259b83 137
ca74b940 138/* A hash table storing register attribute structures. */
139static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
140 htab_t reg_attrs_htab;
141
2ff23ed0 142/* A hash table storing all CONST_DOUBLEs. */
1f3233d1 143static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
144 htab_t const_double_htab;
2ff23ed0 145
e397ad8e 146/* A hash table storing all CONST_FIXEDs. */
147static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
148 htab_t const_fixed_htab;
149
fd6ffb7c 150#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
9845d120 151#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
fd6ffb7c 152#define first_label_num (crtl->emit.x_first_label_num)
15bbde2b 153
35cb5232 154static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
265be050 155static void set_used_decls (tree);
35cb5232 156static void mark_label_nuses (rtx);
157static hashval_t const_int_htab_hash (const void *);
158static int const_int_htab_eq (const void *, const void *);
e913b5cd 159#if TARGET_SUPPORTS_WIDE_INT
160static hashval_t const_wide_int_htab_hash (const void *);
161static int const_wide_int_htab_eq (const void *, const void *);
162static rtx lookup_const_wide_int (rtx);
163#endif
35cb5232 164static hashval_t const_double_htab_hash (const void *);
165static int const_double_htab_eq (const void *, const void *);
166static rtx lookup_const_double (rtx);
e397ad8e 167static hashval_t const_fixed_htab_hash (const void *);
168static int const_fixed_htab_eq (const void *, const void *);
169static rtx lookup_const_fixed (rtx);
35cb5232 170static hashval_t mem_attrs_htab_hash (const void *);
171static int mem_attrs_htab_eq (const void *, const void *);
35cb5232 172static hashval_t reg_attrs_htab_hash (const void *);
173static int reg_attrs_htab_eq (const void *, const void *);
174static reg_attrs *get_reg_attrs (tree, int);
6e68dcb2 175static rtx gen_const_vector (enum machine_mode, int);
0e0727c4 176static void copy_rtx_if_shared_1 (rtx *orig);
73f5c1e3 177
3cd757b1 178/* Probability of the conditional branch currently being processed by try_split.
179 Set to -1 otherwise. */
180int split_branch_probability = -1;
649d8da6 181\f
73f5c1e3 182/* Returns a hash code for X (which is really a CONST_INT). */
183
184static hashval_t
35cb5232 185const_int_htab_hash (const void *x)
73f5c1e3 186{
dd9b9fc5 187 return (hashval_t) INTVAL ((const_rtx) x);
73f5c1e3 188}
189
6ef828f9 190/* Returns nonzero if the value represented by X (which is really a
73f5c1e3 191 CONST_INT) is the same as that given by Y (which is really a
192 HOST_WIDE_INT *). */
193
194static int
35cb5232 195const_int_htab_eq (const void *x, const void *y)
73f5c1e3 196{
dd9b9fc5 197 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
2ff23ed0 198}
199
e913b5cd 200#if TARGET_SUPPORTS_WIDE_INT
 201/* Returns a hash code for X (which is really a CONST_WIDE_INT). */
202
203static hashval_t
204const_wide_int_htab_hash (const void *x)
205{
206 int i;
207 HOST_WIDE_INT hash = 0;
208 const_rtx xr = (const_rtx) x;
209
210 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
211 hash += CONST_WIDE_INT_ELT (xr, i);
212
213 return (hashval_t) hash;
214}
215
216/* Returns nonzero if the value represented by X (which is really a
217 CONST_WIDE_INT) is the same as that given by Y (which is really a
218 CONST_WIDE_INT). */
219
220static int
221const_wide_int_htab_eq (const void *x, const void *y)
222{
223 int i;
224 const_rtx xr = (const_rtx)x;
225 const_rtx yr = (const_rtx)y;
226 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
227 return 0;
228
229 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
230 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
231 return 0;
232
233 return 1;
234}
235#endif
236
2ff23ed0 237/* Returns a hash code for X (which is really a CONST_DOUBLE). */
238static hashval_t
35cb5232 239const_double_htab_hash (const void *x)
2ff23ed0 240{
dd9b9fc5 241 const_rtx const value = (const_rtx) x;
3393215f 242 hashval_t h;
2ff23ed0 243
e913b5cd 244 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
3393215f 245 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
246 else
a5760913 247 {
e2e205b3 248 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
a5760913 249 /* MODE is used in the comparison, so it should be in the hash. */
250 h ^= GET_MODE (value);
251 }
2ff23ed0 252 return h;
253}
254
6ef828f9 255/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
2ff23ed0 256 is the same as that represented by Y (really a CONST_DOUBLE). */
257static int
35cb5232 258const_double_htab_eq (const void *x, const void *y)
2ff23ed0 259{
dd9b9fc5 260 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
2ff23ed0 261
262 if (GET_MODE (a) != GET_MODE (b))
263 return 0;
e913b5cd 264 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
f82a103d 265 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
266 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
267 else
268 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
269 CONST_DOUBLE_REAL_VALUE (b));
73f5c1e3 270}
271
e397ad8e 272/* Returns a hash code for X (which is really a CONST_FIXED). */
273
274static hashval_t
275const_fixed_htab_hash (const void *x)
276{
a9f1838b 277 const_rtx const value = (const_rtx) x;
e397ad8e 278 hashval_t h;
279
280 h = fixed_hash (CONST_FIXED_VALUE (value));
281 /* MODE is used in the comparison, so it should be in the hash. */
282 h ^= GET_MODE (value);
283 return h;
284}
285
 286/* Returns nonzero if the value represented by X (really a CONST_FIXED)
 287 is the same as that represented by Y (really a CONST_FIXED). */
288
289static int
290const_fixed_htab_eq (const void *x, const void *y)
291{
a9f1838b 292 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
e397ad8e 293
294 if (GET_MODE (a) != GET_MODE (b))
295 return 0;
296 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
297}
298
c6259b83 299/* Returns a hash code for X (which is really a mem_attrs *). */
300
301static hashval_t
35cb5232 302mem_attrs_htab_hash (const void *x)
c6259b83 303{
dd9b9fc5 304 const mem_attrs *const p = (const mem_attrs *) x;
c6259b83 305
306 return (p->alias ^ (p->align * 1000)
bd1a81f7 307 ^ (p->addrspace * 4000)
6d58bcba 308 ^ ((p->offset_known_p ? p->offset : 0) * 50000)
309 ^ ((p->size_known_p ? p->size : 0) * 2500000)
2f16183e 310 ^ (size_t) iterative_hash_expr (p->expr, 0));
c6259b83 311}
312
d72886b5 313/* Return true if the given memory attributes are equal. */
73f5c1e3 314
d72886b5 315static bool
316mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
73f5c1e3 317{
6d58bcba 318 return (p->alias == q->alias
319 && p->offset_known_p == q->offset_known_p
320 && (!p->offset_known_p || p->offset == q->offset)
321 && p->size_known_p == q->size_known_p
322 && (!p->size_known_p || p->size == q->size)
323 && p->align == q->align
bd1a81f7 324 && p->addrspace == q->addrspace
2f16183e 325 && (p->expr == q->expr
326 || (p->expr != NULL_TREE && q->expr != NULL_TREE
327 && operand_equal_p (p->expr, q->expr, 0))));
73f5c1e3 328}
329
d72886b5 330/* Returns nonzero if the value represented by X (which is really a
331 mem_attrs *) is the same as that given by Y (which is also really a
332 mem_attrs *). */
c6259b83 333
d72886b5 334static int
335mem_attrs_htab_eq (const void *x, const void *y)
c6259b83 336{
d72886b5 337 return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
338}
c6259b83 339
d72886b5 340/* Set MEM's memory attributes so that they are the same as ATTRS. */
5cc193e7 341
d72886b5 342static void
343set_mem_attrs (rtx mem, mem_attrs *attrs)
344{
345 void **slot;
346
347 /* If everything is the default, we can just clear the attributes. */
348 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
349 {
350 MEM_ATTRS (mem) = 0;
351 return;
352 }
c6259b83 353
d72886b5 354 slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
c6259b83 355 if (*slot == 0)
356 {
ba72912a 357 *slot = ggc_alloc_mem_attrs ();
d72886b5 358 memcpy (*slot, attrs, sizeof (mem_attrs));
c6259b83 359 }
360
d72886b5 361 MEM_ATTRS (mem) = (mem_attrs *) *slot;
73f5c1e3 362}
363
ca74b940 364/* Returns a hash code for X (which is really a reg_attrs *). */
365
366static hashval_t
35cb5232 367reg_attrs_htab_hash (const void *x)
ca74b940 368{
aae87fc3 369 const reg_attrs *const p = (const reg_attrs *) x;
ca74b940 370
e19e0a33 371 return ((p->offset * 1000) ^ (intptr_t) p->decl);
ca74b940 372}
373
7ef5b942 374/* Returns nonzero if the value represented by X (which is really a
ca74b940 375 reg_attrs *) is the same as that given by Y (which is also really a
376 reg_attrs *). */
377
378static int
35cb5232 379reg_attrs_htab_eq (const void *x, const void *y)
ca74b940 380{
aae87fc3 381 const reg_attrs *const p = (const reg_attrs *) x;
382 const reg_attrs *const q = (const reg_attrs *) y;
ca74b940 383
384 return (p->decl == q->decl && p->offset == q->offset);
385}
386/* Allocate a new reg_attrs structure and insert it into the hash table if
 387 one identical to it is not already in the table.  The structure
 388 records the given DECL and OFFSET. */
389
390static reg_attrs *
35cb5232 391get_reg_attrs (tree decl, int offset)
ca74b940 392{
393 reg_attrs attrs;
394 void **slot;
395
396 /* If everything is the default, we can just return zero. */
397 if (decl == 0 && offset == 0)
398 return 0;
399
400 attrs.decl = decl;
401 attrs.offset = offset;
402
403 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
404 if (*slot == 0)
405 {
ba72912a 406 *slot = ggc_alloc_reg_attrs ();
ca74b940 407 memcpy (*slot, &attrs, sizeof (reg_attrs));
408 }
409
2457c754 410 return (reg_attrs *) *slot;
ca74b940 411}
412
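/* Illustrative sketch, not part of the original file: reg_attrs structures
   are hash-consed by get_reg_attrs above, so equal (decl, offset) pairs
   share one structure and the all-default key maps to a null pointer.
   The example_* name is hypothetical; the block is compiled out since it
   exists only as documentation.  */
#if 0
static void
example_reg_attrs_sharing (tree decl)
{
  /* The default attributes are represented by a null pointer.  */
  gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);
  /* Two requests with equal keys yield the same shared structure.  */
  gcc_assert (get_reg_attrs (decl, 4) == get_reg_attrs (decl, 4));
}
#endif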
3072d30e 413
414#if !HAVE_blockage
e12b44a3 415/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
 416 and to prevent register equivalences from being seen across this insn. */
3072d30e 417
418rtx
419gen_blockage (void)
420{
421 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
422 MEM_VOLATILE_P (x) = true;
423 return x;
424}
425#endif
426
427
22cf44bc 428/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
431
432rtx
35cb5232 433gen_raw_REG (enum machine_mode mode, int regno)
22cf44bc 434{
435 rtx x = gen_rtx_raw_REG (mode, regno);
436 ORIGINAL_REGNO (x) = regno;
437 return x;
438}
439
7014838c 440/* There are some RTL codes that require special attention; the generation
441 functions do the raw handling. If you add to this list, modify
442 special_rtx in gengenrtl.c as well. */
443
3ad7bb1c 444rtx
35cb5232 445gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3ad7bb1c 446{
73f5c1e3 447 void **slot;
448
3ad7bb1c 449 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 450 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 451
452#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
453 if (const_true_rtx && arg == STORE_FLAG_VALUE)
454 return const_true_rtx;
455#endif
456
73f5c1e3 457 /* Look up the CONST_INT in the hash table. */
2b3dbc20 458 slot = htab_find_slot_with_hash (const_int_htab, &arg,
459 (hashval_t) arg, INSERT);
7f2875d3 460 if (*slot == 0)
d7c47c0e 461 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 462
463 return (rtx) *slot;
3ad7bb1c 464}
465
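/* Illustrative sketch, not part of the original file: CONST_INTs in
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come from the preallocated
   const_int_rtx array and larger ones are hash-consed in const_int_htab,
   so pointer equality is a valid identity test.  The example_* name is
   hypothetical; the block is compiled out as documentation only.  */
#if 0
static void
example_const_int_sharing (void)
{
  /* 42 lies within the preallocated range on current configurations.  */
  gcc_assert (GEN_INT (42) == const_int_rtx[42 + MAX_SAVED_CONST_INT]);
  /* Larger values are looked up in the hash table, so repeated
     requests also return one shared rtx.  */
  gcc_assert (GEN_INT (123456) == GEN_INT (123456));
}
#endif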
2d232d05 466rtx
35cb5232 467gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2d232d05 468{
469 return GEN_INT (trunc_int_for_mode (c, mode));
470}
471
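/* Illustrative sketch, not part of the original file: gen_int_mode
   canonicalizes the constant for MODE via trunc_int_for_mode, so bits
   outside the mode are dropped and the result is sign-extended.  */
#if 0
static void
example_gen_int_mode (void)
{
  /* 0x1ff truncated to QImode is 0xff, which sign-extends to -1.  */
  gcc_assert (gen_int_mode (0x1ff, QImode) == constm1_rtx);
}
#endif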
2ff23ed0 472/* CONST_DOUBLEs might be created from pairs of integers, or from
473 REAL_VALUE_TYPEs. Also, their length is known only at run time,
474 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
475
476/* Determine whether REAL, a CONST_DOUBLE, already exists in the
477 hash table. If so, return its counterpart; otherwise add it
478 to the hash table and return it. */
479static rtx
35cb5232 480lookup_const_double (rtx real)
2ff23ed0 481{
482 void **slot = htab_find_slot (const_double_htab, real, INSERT);
483 if (*slot == 0)
484 *slot = real;
485
486 return (rtx) *slot;
487}
7f2875d3 488
2ff23ed0 489/* Return a CONST_DOUBLE rtx for a floating-point value specified by
490 VALUE in mode MODE. */
67f2a2eb 491rtx
35cb5232 492const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
67f2a2eb 493{
2ff23ed0 494 rtx real = rtx_alloc (CONST_DOUBLE);
495 PUT_MODE (real, mode);
496
e8aaae4e 497 real->u.rv = value;
2ff23ed0 498
499 return lookup_const_double (real);
500}
501
e397ad8e 502/* Determine whether FIXED, a CONST_FIXED, already exists in the
503 hash table. If so, return its counterpart; otherwise add it
504 to the hash table and return it. */
505
506static rtx
507lookup_const_fixed (rtx fixed)
508{
509 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
510 if (*slot == 0)
511 *slot = fixed;
512
513 return (rtx) *slot;
514}
515
516/* Return a CONST_FIXED rtx for a fixed-point value specified by
517 VALUE in mode MODE. */
518
519rtx
520const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
521{
522 rtx fixed = rtx_alloc (CONST_FIXED);
523 PUT_MODE (fixed, mode);
524
525 fixed->u.fv = value;
526
527 return lookup_const_fixed (fixed);
528}
529
e913b5cd 530#if TARGET_SUPPORTS_WIDE_INT == 0
33274180 531/* Constructs double_int from rtx CST. */
532
533double_int
534rtx_to_double_int (const_rtx cst)
535{
536 double_int r;
537
538 if (CONST_INT_P (cst))
cf8f0e63 539 r = double_int::from_shwi (INTVAL (cst));
78f1962f 540 else if (CONST_DOUBLE_AS_INT_P (cst))
33274180 541 {
542 r.low = CONST_DOUBLE_LOW (cst);
543 r.high = CONST_DOUBLE_HIGH (cst);
544 }
545 else
546 gcc_unreachable ();
547
548 return r;
549}
e913b5cd 550#endif
551
552#if TARGET_SUPPORTS_WIDE_INT
a342dbb2 553/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
554 If so, return its counterpart; otherwise add it to the hash table and
e913b5cd 555 return it. */
33274180 556
e913b5cd 557static rtx
558lookup_const_wide_int (rtx wint)
559{
560 void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
561 if (*slot == 0)
562 *slot = wint;
33274180 563
e913b5cd 564 return (rtx) *slot;
565}
566#endif
3e052aec 567
a342dbb2 568/* Return an rtx constant for V, given that the constant has mode MODE.
569 The returned rtx will be a CONST_INT if V fits, otherwise it will be
570 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
571 (if TARGET_SUPPORTS_WIDE_INT). */
572
3e052aec 573rtx
e913b5cd 574immed_wide_int_const (const wide_int &v, enum machine_mode mode)
3e052aec 575{
e913b5cd 576 unsigned int len = v.get_len ();
577 unsigned int prec = GET_MODE_PRECISION (mode);
578
579 /* Allow truncation but not extension since we do not know if the
580 number is signed or unsigned. */
581 gcc_assert (prec <= v.get_precision ());
582
583 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
584 return gen_int_mode (v.elt (0), mode);
585
586#if TARGET_SUPPORTS_WIDE_INT
587 {
588 unsigned int i;
589 rtx value;
590 unsigned int blocks_needed
591 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
592
593 if (len > blocks_needed)
594 len = blocks_needed;
595
596 value = const_wide_int_alloc (len);
597
598 /* It is so tempting to just put the mode in here. Must control
599 myself ... */
600 PUT_MODE (value, VOIDmode);
05c25ee6 601 CWI_PUT_NUM_ELEM (value, len);
e913b5cd 602
603 for (i = 0; i < len; i++)
05363b4a 604 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
e913b5cd 605
606 return lookup_const_wide_int (value);
607 }
608#else
05363b4a 609 return immed_double_const (v.elt (0), v.elt (1), mode);
e913b5cd 610#endif
3e052aec 611}
612
e913b5cd 613#if TARGET_SUPPORTS_WIDE_INT == 0
2ff23ed0 614/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
615 of ints: I0 is the low-order word and I1 is the high-order word.
24cd46a7 616 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
db20fb47 617 implied upper bits are copies of the high bit of i1. The value
618 itself is neither signed nor unsigned. Do not use this routine for
619 non-integer modes; convert to REAL_VALUE_TYPE and use
620 CONST_DOUBLE_FROM_REAL_VALUE. */
2ff23ed0 621
622rtx
35cb5232 623immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
2ff23ed0 624{
625 rtx value;
626 unsigned int i;
627
b1ca4af4 628 /* There are the following cases (note that there are no modes with
24cd46a7 629 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
b1ca4af4 630
631 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
632 gen_int_mode.
db20fb47 633 2) If the value of the integer fits into HOST_WIDE_INT anyway
634 (i.e., i1 consists only from copies of the sign bit, and sign
635 of i0 and i1 are the same), then we return a CONST_INT for i0.
b1ca4af4 636 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
2ff23ed0 637 if (mode != VOIDmode)
638 {
611234b4 639 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
640 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
641 /* We can get a 0 for an error mark. */
642 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
643 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
2ff23ed0 644
b1ca4af4 645 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
646 return gen_int_mode (i0, mode);
2ff23ed0 647 }
648
649 /* If this integer fits in one word, return a CONST_INT. */
650 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
651 return GEN_INT (i0);
652
653 /* We use VOIDmode for integers. */
654 value = rtx_alloc (CONST_DOUBLE);
655 PUT_MODE (value, VOIDmode);
656
657 CONST_DOUBLE_LOW (value) = i0;
658 CONST_DOUBLE_HIGH (value) = i1;
659
660 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
661 XWINT (value, i) = 0;
662
663 return lookup_const_double (value);
67f2a2eb 664}
e913b5cd 665#endif
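
/* Illustrative sketch, not part of the original file (and, like
   immed_double_const itself, only meaningful when TARGET_SUPPORTS_WIDE_INT
   is 0): values that fit in a single HOST_WIDE_INT collapse to a
   CONST_INT, so a CONST_DOUBLE is only allocated for genuinely
   two-word integers.  */
#if 0
static void
example_immed_double_const (void)
{
  /* i1 consists of copies of i0's sign bit, so a CONST_INT suffices.  */
  gcc_assert (immed_double_const (5, 0, VOIDmode) == GEN_INT (5));
  /* A nonzero high word forces a (hash-consed) CONST_DOUBLE.  */
  gcc_assert (GET_CODE (immed_double_const (0, 1, VOIDmode)) == CONST_DOUBLE);
}
#endif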
67f2a2eb 666
3ad7bb1c 667rtx
35cb5232 668gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3ad7bb1c 669{
670 /* In case the MD file explicitly references the frame pointer, have
671 all such references point to the same frame pointer. This is
672 used during frame pointer elimination to distinguish the explicit
673 references to these registers from pseudos that happened to be
674 assigned to them.
675
676 If we have eliminated the frame pointer or arg pointer, we will
677 be using it as a normal register, for example as a spill
678 register. In such cases, we might be accessing it in a mode that
679 is not Pmode and therefore cannot use the pre-allocated rtx.
680
681 Also don't do this when we are making new REGs in reload, since
682 we don't want to get confused with the real pointers. */
683
c6a6cdaa 684 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
3ad7bb1c 685 {
71801afc 686 if (regno == FRAME_POINTER_REGNUM
687 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 688 return frame_pointer_rtx;
5ae82d58 689#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
71801afc 690 if (regno == HARD_FRAME_POINTER_REGNUM
691 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 692 return hard_frame_pointer_rtx;
693#endif
5ae82d58 694#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
e8b59353 695 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 696 return arg_pointer_rtx;
697#endif
698#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 699 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 700 return return_address_pointer_rtx;
701#endif
3473aefe 702 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
8d43ad05 703 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
6ea47475 704 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
d4c5e26d 705 return pic_offset_table_rtx;
e8b59353 706 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 707 return stack_pointer_rtx;
708 }
709
32b53d83 710#if 0
90295bd2 711 /* If the per-function register table has been set up, try to re-use
32b53d83 712 an existing entry in that table to avoid useless generation of RTL.
713
714 This code is disabled for now until we can fix the various backends
715 which depend on having non-shared hard registers in some cases. Long
716 term we want to re-enable this code as it can significantly cut down
71801afc 717 on the amount of useless RTL that gets generated.
718
719 We'll also need to fix some code that runs after reload that wants to
720 set ORIGINAL_REGNO. */
721
90295bd2 722 if (cfun
723 && cfun->emit
724 && regno_reg_rtx
725 && regno < FIRST_PSEUDO_REGISTER
726 && reg_raw_mode[regno] == mode)
727 return regno_reg_rtx[regno];
32b53d83 728#endif
90295bd2 729
22cf44bc 730 return gen_raw_REG (mode, regno);
3ad7bb1c 731}
732
b5ba9f3a 733rtx
35cb5232 734gen_rtx_MEM (enum machine_mode mode, rtx addr)
b5ba9f3a 735{
736 rtx rt = gen_rtx_raw_MEM (mode, addr);
737
738 /* This field is not cleared by the mere allocation of the rtx, so
739 we clear it here. */
c6259b83 740 MEM_ATTRS (rt) = 0;
b5ba9f3a 741
742 return rt;
743}
701e46d0 744
e265a6da 745/* Generate a MEM referring to non-trapping constant memory. */
746
747rtx
748gen_const_mem (enum machine_mode mode, rtx addr)
749{
750 rtx mem = gen_rtx_MEM (mode, addr);
751 MEM_READONLY_P (mem) = 1;
752 MEM_NOTRAP_P (mem) = 1;
753 return mem;
754}
755
00060fc2 756/* Generate a MEM referring to fixed portions of the frame, e.g., register
757 save areas. */
758
759rtx
760gen_frame_mem (enum machine_mode mode, rtx addr)
761{
762 rtx mem = gen_rtx_MEM (mode, addr);
763 MEM_NOTRAP_P (mem) = 1;
764 set_mem_alias_set (mem, get_frame_alias_set ());
765 return mem;
766}
767
768/* Generate a MEM referring to a temporary use of the stack, not part
769 of the fixed stack frame. For example, something which is pushed
770 by a target splitter. */
771rtx
772gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
773{
774 rtx mem = gen_rtx_MEM (mode, addr);
775 MEM_NOTRAP_P (mem) = 1;
18d50ae6 776 if (!cfun->calls_alloca)
00060fc2 777 set_mem_alias_set (mem, get_frame_alias_set ());
778 return mem;
779}
780
2166bbaa 781/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
782 this construct would be valid, and false otherwise. */
783
784bool
785validate_subreg (enum machine_mode omode, enum machine_mode imode,
7ecb5bb2 786 const_rtx reg, unsigned int offset)
701e46d0 787{
2166bbaa 788 unsigned int isize = GET_MODE_SIZE (imode);
789 unsigned int osize = GET_MODE_SIZE (omode);
790
791 /* All subregs must be aligned. */
792 if (offset % osize != 0)
793 return false;
794
795 /* The subreg offset cannot be outside the inner object. */
796 if (offset >= isize)
797 return false;
798
799 /* ??? This should not be here. Temporarily continue to allow word_mode
800 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
801 Generally, backends are doing something sketchy but it'll take time to
802 fix them all. */
803 if (omode == word_mode)
804 ;
805 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
806 is the culprit here, and not the backends. */
807 else if (osize >= UNITS_PER_WORD && isize >= osize)
808 ;
809 /* Allow component subregs of complex and vector. Though given the below
810 extraction rules, it's not always clear what that means. */
811 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
812 && GET_MODE_INNER (imode) == omode)
813 ;
814 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
815 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
816 represent this. It's questionable if this ought to be represented at
817 all -- why can't this all be hidden in post-reload splitters that make
 818 arbitrary mode changes to the registers themselves. */
819 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
820 ;
821 /* Subregs involving floating point modes are not allowed to
822 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
823 (subreg:SI (reg:DF) 0) isn't. */
824 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
825 {
c6a6cdaa 826 if (! (isize == osize
827 /* LRA can use subreg to store a floating point value in
828 an integer mode. Although the floating point and the
829 integer modes need the same number of hard registers,
830 the size of floating point mode can be less than the
 831 integer mode. LRA also uses subregs for a register that
 832 should be used in a different mode in an insn. */
833 || lra_in_progress))
2166bbaa 834 return false;
835 }
701e46d0 836
2166bbaa 837 /* Paradoxical subregs must have offset zero. */
838 if (osize > isize)
839 return offset == 0;
840
841 /* This is a normal subreg. Verify that the offset is representable. */
842
843 /* For hard registers, we already have most of these rules collected in
844 subreg_offset_representable_p. */
845 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
846 {
847 unsigned int regno = REGNO (reg);
848
849#ifdef CANNOT_CHANGE_MODE_CLASS
850 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
851 && GET_MODE_INNER (imode) == omode)
852 ;
853 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
854 return false;
701e46d0 855#endif
2166bbaa 856
857 return subreg_offset_representable_p (regno, imode, offset, omode);
858 }
859
860 /* For pseudo registers, we want most of the same checks. Namely:
 861 If the register is no larger than a word, the subreg must be the lowpart.
862 If the register is larger than a word, the subreg must be the lowpart
863 of a subword. A subreg does *not* perform arbitrary bit extraction.
864 Given that we've already checked mode/offset alignment, we only have
865 to check subword subregs here. */
c6a6cdaa 866 if (osize < UNITS_PER_WORD
867 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
2166bbaa 868 {
869 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
870 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
871 if (offset % UNITS_PER_WORD != low_off)
872 return false;
873 }
874 return true;
875}
876
877rtx
878gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
879{
880 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
2ff23ed0 881 return gen_rtx_raw_SUBREG (mode, reg, offset);
701e46d0 882}
883
c6259b83 884/* Generate a SUBREG representing the least-significant part of REG if MODE
885 is smaller than mode of REG, otherwise paradoxical SUBREG. */
886
701e46d0 887rtx
35cb5232 888gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
701e46d0 889{
890 enum machine_mode inmode;
701e46d0 891
892 inmode = GET_MODE (reg);
893 if (inmode == VOIDmode)
894 inmode = mode;
81802af6 895 return gen_rtx_SUBREG (mode, reg,
896 subreg_lowpart_offset (mode, inmode));
701e46d0 897}
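
/* Illustrative sketch, not part of the original file: gen_lowpart_SUBREG
   computes the endian-correct SUBREG_BYTE itself, and its result always
   satisfies subreg_lowpart_p, whereas a misaligned offset handed
   directly to validate_subreg is rejected.  */
#if 0
static void
example_lowpart_subreg (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_SUBREG (SImode, di);
  gcc_assert (subreg_lowpart_p (lo));
  /* An offset that is not a multiple of the outer size is invalid.  */
  gcc_assert (!validate_subreg (SImode, DImode, di, 1));
}
#endif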
7014838c 898\f
15bbde2b 899
cf9ac040 900/* Create an rtvec and store within it the RTXen passed in the arguments. */
901
15bbde2b 902rtvec
ee582a61 903gen_rtvec (int n, ...)
15bbde2b 904{
cf9ac040 905 int i;
906 rtvec rt_val;
ee582a61 907 va_list p;
15bbde2b 908
ee582a61 909 va_start (p, n);
15bbde2b 910
cf9ac040 911 /* Don't allocate an empty rtvec... */
15bbde2b 912 if (n == 0)
451c8e2f 913 {
914 va_end (p);
915 return NULL_RTVEC;
916 }
15bbde2b 917
cf9ac040 918 rt_val = rtvec_alloc (n);
e5fcd76a 919
15bbde2b 920 for (i = 0; i < n; i++)
cf9ac040 921 rt_val->elem[i] = va_arg (p, rtx);
7ad77798 922
ee582a61 923 va_end (p);
cf9ac040 924 return rt_val;
15bbde2b 925}
926
927rtvec
35cb5232 928gen_rtvec_v (int n, rtx *argp)
15bbde2b 929{
19cb6b50 930 int i;
931 rtvec rt_val;
15bbde2b 932
cf9ac040 933 /* Don't allocate an empty rtvec... */
15bbde2b 934 if (n == 0)
cf9ac040 935 return NULL_RTVEC;
15bbde2b 936
cf9ac040 937 rt_val = rtvec_alloc (n);
15bbde2b 938
939 for (i = 0; i < n; i++)
a4070a91 940 rt_val->elem[i] = *argp++;
15bbde2b 941
942 return rt_val;
943}
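
/* Illustrative sketch, not part of the original file: gen_rtvec is the
   varargs counterpart of gen_rtvec_v, and zero-length requests return
   NULL_RTVEC rather than an empty vector.  */
#if 0
static void
example_gen_rtvec (void)
{
  rtvec v = gen_rtvec (2, const0_rtx, const1_rtx);
  gcc_assert (GET_NUM_ELEM (v) == 2);
  gcc_assert (RTVEC_ELT (v, 0) == const0_rtx);
  gcc_assert (gen_rtvec (0) == NULL_RTVEC);
}
#endif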
944\f
80c70e76 945/* Return the number of bytes between the start of an OUTER_MODE
946 in-memory value and the start of an INNER_MODE in-memory value,
947 given that the former is a lowpart of the latter. It may be a
948 paradoxical lowpart, in which case the offset will be negative
949 on big-endian targets. */
950
951int
952byte_lowpart_offset (enum machine_mode outer_mode,
953 enum machine_mode inner_mode)
954{
955 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
956 return subreg_lowpart_offset (outer_mode, inner_mode);
957 else
958 return -subreg_lowpart_offset (inner_mode, outer_mode);
959}
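
/* Illustrative sketch, not part of the original file: shrinking and
   growing between the same two modes give negated offsets, e.g. for
   SImode within DImode the results are 0/0 on little-endian and 4/-4 on
   big-endian targets.  */
#if 0
static void
example_byte_lowpart_offset (void)
{
  int shrink = byte_lowpart_offset (SImode, DImode);
  int grow = byte_lowpart_offset (DImode, SImode);
  gcc_assert (grow == -shrink);
}
#endif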
960\f
15bbde2b 961/* Generate a REG rtx for a new pseudo register of mode MODE.
962 This pseudo is assigned the next sequential register number. */
963
964rtx
35cb5232 965gen_reg_rtx (enum machine_mode mode)
15bbde2b 966{
19cb6b50 967 rtx val;
27a7a23a 968 unsigned int align = GET_MODE_ALIGNMENT (mode);
15bbde2b 969
1b7ff857 970 gcc_assert (can_create_pseudo_p ());
15bbde2b 971
27a7a23a 972 /* If a virtual register with bigger mode alignment is generated,
973 increase stack alignment estimation because it might be spilled
974 to stack later. */
48e1416a 975 if (SUPPORTS_STACK_ALIGNMENT
27a7a23a 976 && crtl->stack_alignment_estimated < align
977 && !crtl->stack_realign_processed)
8645d3e7 978 {
979 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
980 if (crtl->stack_alignment_estimated < min_align)
981 crtl->stack_alignment_estimated = min_align;
982 }
27a7a23a 983
316bc009 984 if (generating_concat_p
985 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
986 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 987 {
988 /* For complex modes, don't make a single pseudo.
989 Instead, make a CONCAT of two pseudos.
990 This allows noncontiguous allocation of the real and imaginary parts,
991 which makes much better code. Besides, allocating DCmode
992 pseudos overstrains reload on some machines like the 386. */
993 rtx realpart, imagpart;
e9e12845 994 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 995
996 realpart = gen_reg_rtx (partmode);
997 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 998 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 999 }
1000
ca74b940 1001 /* Make sure regno_pointer_align, and regno_reg_rtx are large
fcdc122e 1002 enough to have an element for this pseudo reg number. */
15bbde2b 1003
fd6ffb7c 1004 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
15bbde2b 1005 {
fd6ffb7c 1006 int old_size = crtl->emit.regno_pointer_align_length;
9ce37fa7 1007 char *tmp;
fcdc122e 1008 rtx *new1;
fcdc122e 1009
9ce37fa7 1010 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
1011 memset (tmp + old_size, 0, old_size);
1012 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
0a893c29 1013
2457c754 1014 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
0a893c29 1015 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 1016 regno_reg_rtx = new1;
1017
fd6ffb7c 1018 crtl->emit.regno_pointer_align_length = old_size * 2;
15bbde2b 1019 }
1020
22cf44bc 1021 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 1022 regno_reg_rtx[reg_rtx_no++] = val;
1023 return val;
1024}
1025
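/* Illustrative sketch, not part of the original file: while
   generating_concat_p is nonzero (i.e. during initial RTL expansion), a
   complex-mode pseudo is really a CONCAT of two part-mode pseudos, as
   the comment above explains.  DCmode is assumed to exist on the
   target.  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (DCmode);
  gcc_assert (GET_CODE (c) == CONCAT);
  gcc_assert (GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif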
ea239197 1026/* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1027
1028bool
1029reg_is_parm_p (rtx reg)
1030{
1031 tree decl;
1032
1033 gcc_assert (REG_P (reg));
1034 decl = REG_EXPR (reg);
1035 return (decl && TREE_CODE (decl) == PARM_DECL);
1036}
1037
80c70e76 1038/* Update NEW with the same attributes as REG, but with OFFSET added
1039 to the REG_OFFSET. */
ca74b940 1040
1a6a0f2a 1041static void
9ce37fa7 1042update_reg_offset (rtx new_rtx, rtx reg, int offset)
ca74b940 1043{
9ce37fa7 1044 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
35cb5232 1045 REG_OFFSET (reg) + offset);
1a6a0f2a 1046}
1047
80c70e76 1048/* Generate a register with same attributes as REG, but with OFFSET
1049 added to the REG_OFFSET. */
1a6a0f2a 1050
1051rtx
1052gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
1053 int offset)
1054{
9ce37fa7 1055 rtx new_rtx = gen_rtx_REG (mode, regno);
1a6a0f2a 1056
9ce37fa7 1057 update_reg_offset (new_rtx, reg, offset);
1058 return new_rtx;
1a6a0f2a 1059}
1060
1061/* Generate a new pseudo-register with the same attributes as REG, but
80c70e76 1062 with OFFSET added to the REG_OFFSET. */
1a6a0f2a 1063
1064rtx
1065gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
1066{
9ce37fa7 1067 rtx new_rtx = gen_reg_rtx (mode);
1a6a0f2a 1068
9ce37fa7 1069 update_reg_offset (new_rtx, reg, offset);
1070 return new_rtx;
ca74b940 1071}
1072
80c70e76 1073/* Adjust REG in-place so that it has mode MODE. It is assumed that the
1074 new register is a (possibly paradoxical) lowpart of the old one. */
ca74b940 1075
1076void
80c70e76 1077adjust_reg_mode (rtx reg, enum machine_mode mode)
ca74b940 1078{
80c70e76 1079 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1080 PUT_MODE (reg, mode);
1081}
1082
1083/* Copy REG's attributes from X, if X has any attributes. If REG and X
1084 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1085
1086void
1087set_reg_attrs_from_value (rtx reg, rtx x)
1088{
1089 int offset;
e623c80a 1090 bool can_be_reg_pointer = true;
1091
1092 /* Don't call mark_reg_pointer for incompatible pointer sign
1093 extension. */
1094 while (GET_CODE (x) == SIGN_EXTEND
1095 || GET_CODE (x) == ZERO_EXTEND
1096 || GET_CODE (x) == TRUNCATE
1097 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1098 {
1099#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1100 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1101 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
1102 can_be_reg_pointer = false;
1103#endif
1104 x = XEXP (x, 0);
1105 }
80c70e76 1106
ac56145e 1107 /* Hard registers can be reused for multiple purposes within the same
1108 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1109 on them is wrong. */
1110 if (HARD_REGISTER_P (reg))
1111 return;
1112
80c70e76 1113 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
ae12ddda 1114 if (MEM_P (x))
1115 {
da443c27 1116 if (MEM_OFFSET_KNOWN_P (x))
1117 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1118 MEM_OFFSET (x) + offset);
e623c80a 1119 if (can_be_reg_pointer && MEM_POINTER (x))
40b93dba 1120 mark_reg_pointer (reg, 0);
ae12ddda 1121 }
1122 else if (REG_P (x))
1123 {
1124 if (REG_ATTRS (x))
1125 update_reg_offset (reg, x, offset);
e623c80a 1126 if (can_be_reg_pointer && REG_POINTER (x))
ae12ddda 1127 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1128 }
1129}
1130
1131/* Generate a REG rtx for a new pseudo register, copying the mode
1132 and attributes from X. */
1133
1134rtx
1135gen_reg_rtx_and_attrs (rtx x)
1136{
1137 rtx reg = gen_reg_rtx (GET_MODE (x));
1138 set_reg_attrs_from_value (reg, x);
1139 return reg;
ca74b940 1140}
1141
263c416c 1142/* Set the register attributes for registers contained in PARM_RTX.
1143 Use needed values from memory attributes of MEM. */
1144
1145void
35cb5232 1146set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
263c416c 1147{
8ad4c111 1148 if (REG_P (parm_rtx))
80c70e76 1149 set_reg_attrs_from_value (parm_rtx, mem);
263c416c 1150 else if (GET_CODE (parm_rtx) == PARALLEL)
1151 {
1152 /* Check for a NULL entry in the first slot, used to indicate that the
1153 parameter goes both on the stack and in registers. */
1154 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1155 for (; i < XVECLEN (parm_rtx, 0); i++)
1156 {
1157 rtx x = XVECEXP (parm_rtx, 0, i);
8ad4c111 1158 if (REG_P (XEXP (x, 0)))
263c416c 1159 REG_ATTRS (XEXP (x, 0))
1160 = get_reg_attrs (MEM_EXPR (mem),
1161 INTVAL (XEXP (x, 1)));
1162 }
1163 }
1164}
1165
80c70e76 1166/* Set the REG_ATTRS for registers in value X, given that X represents
1167 decl T. */
ca74b940 1168
a8dd994c 1169void
80c70e76 1170set_reg_attrs_for_decl_rtl (tree t, rtx x)
1171{
1172 if (GET_CODE (x) == SUBREG)
ebfc27f5 1173 {
80c70e76 1174 gcc_assert (subreg_lowpart_p (x));
1175 x = SUBREG_REG (x);
ebfc27f5 1176 }
8ad4c111 1177 if (REG_P (x))
80c70e76 1178 REG_ATTRS (x)
1179 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
3ff99faa 1180 DECL_MODE (t)));
ca74b940 1181 if (GET_CODE (x) == CONCAT)
1182 {
1183 if (REG_P (XEXP (x, 0)))
1184 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1185 if (REG_P (XEXP (x, 1)))
1186 REG_ATTRS (XEXP (x, 1))
1187 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1188 }
1189 if (GET_CODE (x) == PARALLEL)
1190 {
85d25060 1191 int i, start;
1192
1193 /* Check for a NULL entry, used to indicate that the parameter goes
1194 both on the stack and in registers. */
1195 if (XEXP (XVECEXP (x, 0, 0), 0))
1196 start = 0;
1197 else
1198 start = 1;
1199
1200 for (i = start; i < XVECLEN (x, 0); i++)
ca74b940 1201 {
1202 rtx y = XVECEXP (x, 0, i);
1203 if (REG_P (XEXP (y, 0)))
1204 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1205 }
1206 }
1207}
1208
80c70e76 1209/* Assign the RTX X to declaration T. */
1210
1211void
1212set_decl_rtl (tree t, rtx x)
1213{
1214 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1215 if (x)
1216 set_reg_attrs_for_decl_rtl (t, x);
1217}
1218
d91cf567 1219/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1220 if the ABI requires the parameter to be passed by reference. */
80c70e76 1221
1222void
d91cf567 1223set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
80c70e76 1224{
1225 DECL_INCOMING_RTL (t) = x;
d91cf567 1226 if (x && !by_reference_p)
80c70e76 1227 set_reg_attrs_for_decl_rtl (t, x);
1228}
1229
de8ecfb5 1230/* Identify REG (which may be a CONCAT) as a user register. */
1231
1232void
35cb5232 1233mark_user_reg (rtx reg)
de8ecfb5 1234{
1235 if (GET_CODE (reg) == CONCAT)
1236 {
1237 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1238 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1239 }
de8ecfb5 1240 else
611234b4 1241 {
1242 gcc_assert (REG_P (reg));
1243 REG_USERVAR_P (reg) = 1;
1244 }
de8ecfb5 1245}
1246
d4c332ff 1247/* Identify REG as a probable pointer register and show its alignment
1248 as ALIGN, if nonzero. */
15bbde2b 1249
1250void
35cb5232 1251mark_reg_pointer (rtx reg, int align)
15bbde2b 1252{
e61a0a7f 1253 if (! REG_POINTER (reg))
612409a6 1254 {
e61a0a7f 1255 REG_POINTER (reg) = 1;
d4c332ff 1256
612409a6 1257 if (align)
1258 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1259 }
1260 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
8b332087 1261 /* We can no longer be sure just how aligned this pointer is. */
d4c332ff 1262 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 1263}
1264
1265/* Return 1 plus largest pseudo reg number used in the current function. */
1266
1267int
35cb5232 1268max_reg_num (void)
15bbde2b 1269{
1270 return reg_rtx_no;
1271}
1272
1273/* Return 1 + the largest label number used so far in the current function. */
1274
1275int
35cb5232 1276max_label_num (void)
15bbde2b 1277{
15bbde2b 1278 return label_num;
1279}
1280
1281/* Return first label number used in this function (if any were used). */
1282
1283int
35cb5232 1284get_first_label_num (void)
15bbde2b 1285{
1286 return first_label_num;
1287}
4ee9c684 1288
1289/* If the rtx for label was created during the expansion of a nested
1290 function, then first_label_num won't include this label number.
f0b5f617 1291 Fix this now so that array indices work later. */
4ee9c684 1292
1293void
1294maybe_set_first_label_num (rtx x)
1295{
1296 if (CODE_LABEL_NUMBER (x) < first_label_num)
1297 first_label_num = CODE_LABEL_NUMBER (x);
1298}
15bbde2b 1299\f
1300/* Return a value representing some low-order bits of X, where the number
1301 of low-order bits is given by MODE. Note that no conversion is done
d823ba47 1302 between floating-point and fixed-point values; rather, the bit
15bbde2b 1303 representation is returned.
1304
1305 This function handles the cases in common between gen_lowpart, below,
1306 and two variants in cse.c and combine.c. These are the cases that can
1307 be safely handled at all points in the compilation.
1308
1309 If this is not a case we can handle, return 0. */
1310
1311rtx
35cb5232 1312gen_lowpart_common (enum machine_mode mode, rtx x)
15bbde2b 1313{
701e46d0 1314 int msize = GET_MODE_SIZE (mode);
791172c5 1315 int xsize;
701e46d0 1316 int offset = 0;
791172c5 1317 enum machine_mode innermode;
1318
1319 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1320 so we have to make one up. Yuk. */
1321 innermode = GET_MODE (x);
971ba038 1322 if (CONST_INT_P (x)
6c799a83 1323 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
791172c5 1324 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1325 else if (innermode == VOIDmode)
24cd46a7 1326 innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
48e1416a 1327
791172c5 1328 xsize = GET_MODE_SIZE (innermode);
1329
611234b4 1330 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
15bbde2b 1331
791172c5 1332 if (innermode == mode)
15bbde2b 1333 return x;
1334
1335 /* MODE must occupy no more words than the mode of X. */
791172c5 1336 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1337 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
15bbde2b 1338 return 0;
1339
9abe1e73 1340 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
cee7491d 1341 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
9abe1e73 1342 return 0;
1343
791172c5 1344 offset = subreg_lowpart_offset (mode, innermode);
15bbde2b 1345
1346 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
64115b39 1347 && (GET_MODE_CLASS (mode) == MODE_INT
1348 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
15bbde2b 1349 {
1350 /* If we are getting the low-order part of something that has been
1351 sign- or zero-extended, we can either just use the object being
1352 extended or make a narrower extension. If we want an even smaller
1353 piece than the size of the object being extended, call ourselves
1354 recursively.
1355
1356 This case is used mostly by combine and cse. */
1357
1358 if (GET_MODE (XEXP (x, 0)) == mode)
1359 return XEXP (x, 0);
791172c5 1360 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
15bbde2b 1361 return gen_lowpart_common (mode, XEXP (x, 0));
791172c5 1362 else if (msize < xsize)
3ad7bb1c 1363 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
15bbde2b 1364 }
8ad4c111 1365 else if (GET_CODE (x) == SUBREG || REG_P (x)
791172c5 1366 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
efa08fc2 1367 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
791172c5 1368 return simplify_gen_subreg (mode, x, innermode, offset);
4a307dd5 1369
15bbde2b 1370 /* Otherwise, we can't do this. */
1371 return 0;
1372}
1373\f
d56d0ca2 1374rtx
35cb5232 1375gen_highpart (enum machine_mode mode, rtx x)
d56d0ca2 1376{
701e46d0 1377 unsigned int msize = GET_MODE_SIZE (mode);
81802af6 1378 rtx result;
701e46d0 1379
d56d0ca2 1380 /* This case loses if X is a subreg. To catch bugs early,
1381 complain if an invalid MODE is used even in other cases. */
611234b4 1382 gcc_assert (msize <= UNITS_PER_WORD
1383 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
701e46d0 1384
81802af6 1385 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1386 subreg_highpart_offset (mode, GET_MODE (x)));
611234b4 1387 gcc_assert (result);
48e1416a 1388
a8c36ab2 1389 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1390 the target if we have a MEM. gen_highpart must return a valid operand,
1391 emitting code if necessary to do so. */
611234b4 1392 if (MEM_P (result))
1393 {
1394 result = validize_mem (result);
1395 gcc_assert (result);
1396 }
48e1416a 1397
81802af6 1398 return result;
1399}
704fcf2b 1400
29d56731 1401/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
704fcf2b 1402 can be a VOIDmode constant. */
1403rtx
35cb5232 1404gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
704fcf2b 1405{
1406 if (GET_MODE (exp) != VOIDmode)
1407 {
611234b4 1408 gcc_assert (GET_MODE (exp) == innermode);
704fcf2b 1409 return gen_highpart (outermode, exp);
1410 }
1411 return simplify_gen_subreg (outermode, exp, innermode,
1412 subreg_highpart_offset (outermode, innermode));
1413}
d4c5e26d 1414
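/* Illustrative sketch, not part of the original file.  Assumes a target
   where DImode spans two words (e.g. UNITS_PER_WORD == 4), since within
   a single word only lowpart subregs are valid: the high part of a
   double-word pseudo is the lowpart of its other word, so the two
   SUBREG_BYTEs are complementary regardless of endianness.  */
#if 0
static void
example_gen_highpart (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx hi = gen_highpart (word_mode, di);
  rtx lo = gen_lowpart_SUBREG (word_mode, di);
  gcc_assert (SUBREG_BYTE (hi) != SUBREG_BYTE (lo));
}
#endif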
80c70e76 1415/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
10ef59ac 1416
81802af6 1417unsigned int
35cb5232 1418subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
81802af6 1419{
1420 unsigned int offset = 0;
1421 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1422
81802af6 1423 if (difference > 0)
d56d0ca2 1424 {
81802af6 1425 if (WORDS_BIG_ENDIAN)
1426 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1427 if (BYTES_BIG_ENDIAN)
1428 offset += difference % UNITS_PER_WORD;
d56d0ca2 1429 }
701e46d0 1430
81802af6 1431 return offset;
d56d0ca2 1432}
64ab453f 1433
81802af6 1434/* Return offset in bytes to get OUTERMODE high part
1435 of the value in mode INNERMODE stored in memory in target format. */
1436unsigned int
35cb5232 1437subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
64ab453f 1438{
1439 unsigned int offset = 0;
1440 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1441
611234b4 1442 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
81802af6 1443
64ab453f 1444 if (difference > 0)
1445 {
81802af6 1446 if (! WORDS_BIG_ENDIAN)
64ab453f 1447 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1448 if (! BYTES_BIG_ENDIAN)
64ab453f 1449 offset += difference % UNITS_PER_WORD;
1450 }
1451
81802af6 1452 return offset;
64ab453f 1453}
d56d0ca2 1454
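/* Illustrative sketch, not part of the original file: however the bytes
   are distributed by WORDS_BIG_ENDIAN/BYTES_BIG_ENDIAN, the lowpart and
   highpart offsets always partition the size difference between the
   two modes.  */
#if 0
static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);
  gcc_assert (lo + hi
	      == (unsigned int) (GET_MODE_SIZE (DImode)
				 - GET_MODE_SIZE (SImode)));
}
#endif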
15bbde2b 1455/* Return 1 iff X, assumed to be a SUBREG,
1456 refers to the least significant part of its containing reg.
1457 If X is not a SUBREG, always return 1 (it is its own low part!). */
1458
1459int
b7bf20db 1460subreg_lowpart_p (const_rtx x)
15bbde2b 1461{
1462 if (GET_CODE (x) != SUBREG)
1463 return 1;
7e14c1bf 1464 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1465 return 0;
15bbde2b 1466
81802af6 1467 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1468 == SUBREG_BYTE (x));
15bbde2b 1469}
b537bfdb 1470
1471/* Return true if X is a paradoxical subreg, false otherwise. */
1472bool
1473paradoxical_subreg_p (const_rtx x)
1474{
1475 if (GET_CODE (x) != SUBREG)
1476 return false;
1477 return (GET_MODE_PRECISION (GET_MODE (x))
1478 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1479}
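
/* Illustrative sketch, not part of the original file: a subreg whose
   outer mode is wider than the inner register is paradoxical, and
   validate_subreg (called by gen_rtx_SUBREG) requires its offset to be
   zero.  */
#if 0
static void
example_paradoxical_subreg (void)
{
  rtx si = gen_reg_rtx (SImode);
  rtx px = gen_rtx_SUBREG (DImode, si, 0);
  gcc_assert (paradoxical_subreg_p (px));
  gcc_assert (!paradoxical_subreg_p (si));	/* Not a SUBREG at all.  */
}
#endif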
15bbde2b 1480\f
701e46d0 1481/* Return subword OFFSET of operand OP.
1482 The word number, OFFSET, is interpreted as the word number starting
1483 at the low-order address. OFFSET 0 is the low-order word if not
1484 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1485
1486 If we cannot extract the required word, we return zero. Otherwise,
1487 an rtx corresponding to the requested word will be returned.
1488
1489 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1490 reload has completed, a valid address will always be returned. After
1491 reload, if a valid address cannot be returned, we return zero.
1492
1493 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1494 it is the responsibility of the caller.
1495
1496 MODE is the mode of OP in case it is a CONST_INT.
1497
1498 ??? This is still rather broken for some cases. The problem for the
1499 moment is that all callers of this thing provide no 'goal mode' to
1500 tell us to work with. This exists because all callers were written
84e81e84 1501 in a word-based SUBREG world.
 1502 Most uses of this function can now be replaced by
 1503 simplify_subreg.
1504 */
701e46d0 1505
1506rtx
35cb5232 1507operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
701e46d0 1508{
1509 if (mode == VOIDmode)
1510 mode = GET_MODE (op);
1511
611234b4 1512 gcc_assert (mode != VOIDmode);
701e46d0 1513
6312a35e 1514 /* If OP is narrower than a word, fail. */
701e46d0 1515 if (mode != BLKmode
1516 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1517 return 0;
1518
6312a35e 1519 /* If we want a word outside OP, return zero. */
701e46d0 1520 if (mode != BLKmode
1521 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1522 return const0_rtx;
1523
701e46d0 1524 /* Form a new MEM at the requested address. */
e16ceb8e 1525 if (MEM_P (op))
701e46d0 1526 {
9ce37fa7 1527 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
701e46d0 1528
e4e86ec5 1529 if (! validate_address)
9ce37fa7 1530 return new_rtx;
e4e86ec5 1531
1532 else if (reload_completed)
701e46d0 1533 {
bd1a81f7 1534 if (! strict_memory_address_addr_space_p (word_mode,
1535 XEXP (new_rtx, 0),
1536 MEM_ADDR_SPACE (op)))
e4e86ec5 1537 return 0;
701e46d0 1538 }
e4e86ec5 1539 else
9ce37fa7 1540 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
701e46d0 1541 }
1542
84e81e84 1543 /* Rest can be handled by simplify_subreg. */
1544 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
701e46d0 1545}
1546
89f18f73 1547/* Similar to `operand_subword', but never return 0. If we can't
1548 extract the required subword, put OP into a register and try again.
1549 The second attempt must succeed. We always validate the address in
1550 this case.
15bbde2b 1551
1552 MODE is the mode of OP, in case it is CONST_INT. */
1553
1554rtx
35cb5232 1555operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
15bbde2b 1556{
701e46d0 1557 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1558
1559 if (result)
1560 return result;
1561
1562 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1563 {
 1564 /* If this is a register which cannot be accessed by words, copy it
1565 to a pseudo register. */
8ad4c111 1566 if (REG_P (op))
ac825d29 1567 op = copy_to_reg (op);
1568 else
1569 op = force_reg (mode, op);
1570 }
15bbde2b 1571
701e46d0 1572 result = operand_subword (op, offset, 1, mode);
611234b4 1573 gcc_assert (result);
15bbde2b 1574
1575 return result;
1576}
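
/* Illustrative sketch, not part of the original file.  Assumes a target
   where DImode spans two words: word 0 is the low-order word unless
   WORDS_BIG_ENDIAN, and the _force variant guarantees a non-null result
   even where operand_subword itself could fail.  */
#if 0
static void
example_operand_subword (void)
{
  rtx di = gen_reg_rtx (DImode);
  rtx w0 = operand_subword_force (di, 0, DImode);
  rtx w1 = operand_subword_force (di, 1, DImode);
  gcc_assert (w0 != 0 && w1 != 0 && !rtx_equal_p (w0, w1));
}
#endif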
1577\f
b3ff8d90 1578/* Returns 1 if the two MEM_EXPRs can be considered equal
1579 and 0 otherwise. */
1580
1581int
52d07779 1582mem_expr_equal_p (const_tree expr1, const_tree expr2)
b3ff8d90 1583{
1584 if (expr1 == expr2)
1585 return 1;
1586
1587 if (! expr1 || ! expr2)
1588 return 0;
1589
1590 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1591 return 0;
1592
3a443843 1593 return operand_equal_p (expr1, expr2, 0);
b3ff8d90 1594}
1595
ad0a178f 1596/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1597 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1598 -1 if not known. */
1599
1600int
7cfdc2f0 1601get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1602{
1603 tree expr;
1604 unsigned HOST_WIDE_INT offset;
1605
1606 /* This function can't use
da443c27 1607 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
98ab9e8f 1608 || (MAX (MEM_ALIGN (mem),
957d0361 1609 MAX (align, get_object_alignment (MEM_EXPR (mem))))
ad0a178f 1610 < align))
1611 return -1;
1612 else
da443c27 1613 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
ad0a178f 1614 for two reasons:
1615 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1616 for <variable>. get_inner_reference doesn't handle it and
1617 even if it did, the alignment in that case needs to be determined
1618 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1619 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1620 isn't sufficiently aligned, the object it is in might be. */
1621 gcc_assert (MEM_P (mem));
1622 expr = MEM_EXPR (mem);
da443c27 1623 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
ad0a178f 1624 return -1;
1625
da443c27 1626 offset = MEM_OFFSET (mem);
ad0a178f 1627 if (DECL_P (expr))
1628 {
1629 if (DECL_ALIGN (expr) < align)
1630 return -1;
1631 }
1632 else if (INDIRECT_REF_P (expr))
1633 {
1634 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1635 return -1;
1636 }
1637 else if (TREE_CODE (expr) == COMPONENT_REF)
1638 {
1639 while (1)
1640 {
1641 tree inner = TREE_OPERAND (expr, 0);
1642 tree field = TREE_OPERAND (expr, 1);
1643 tree byte_offset = component_ref_field_offset (expr);
1644 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1645
1646 if (!byte_offset
e913b5cd 1647 || !tree_fits_uhwi_p (byte_offset)
1648 || !tree_fits_uhwi_p (bit_offset))
ad0a178f 1649 return -1;
1650
e913b5cd 1651 offset += tree_to_uhwi (byte_offset);
1652 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
ad0a178f 1653
1654 if (inner == NULL_TREE)
1655 {
1656 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1657 < (unsigned int) align)
1658 return -1;
1659 break;
1660 }
1661 else if (DECL_P (inner))
1662 {
1663 if (DECL_ALIGN (inner) < align)
1664 return -1;
1665 break;
1666 }
1667 else if (TREE_CODE (inner) != COMPONENT_REF)
1668 return -1;
1669 expr = inner;
1670 }
1671 }
1672 else
1673 return -1;
1674
1675 return offset & ((align / BITS_PER_UNIT) - 1);
1676}
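
/* Illustrative usage sketch (hypothetical helper, not part of the
   original file): a caller that wants to know whether MEM is provably
   32-bit aligned can test for a zero return; -1 means the offset from
   the requested alignment is unknown.  */
#if 0
static bool
example_known_32bit_aligned (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}
#endif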
1677
310b57a1 1678/* Given REF (a MEM) and T, either the type of X or the expression
c6259b83 1679 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1680 if we are making a new object of this type. BITPOS is nonzero if
1681 there is an offset outstanding on T that will be applied later. */
c6259b83 1682
1683void
35cb5232 1684set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1685 HOST_WIDE_INT bitpos)
c6259b83 1686{
6f717f77 1687 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1688 tree type;
d72886b5 1689 struct mem_attrs attrs, *defattrs, *refattrs;
3f06bd1b 1690 addr_space_t as;
c6259b83 1691
1692 /* It can happen that type_for_mode was given a mode for which there
1693 is no language-level type. In which case it returns NULL, which
1694 we can see here. */
1695 if (t == NULL_TREE)
1696 return;
1697
1698 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1699 if (type == error_mark_node)
1700 return;
c6259b83 1701
c6259b83 1702 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1703 wrong answer, as it assumes that DECL_RTL already has the right alias
1704 info. Callers should not set DECL_RTL until after the call to
1705 set_mem_attributes. */
611234b4 1706 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1707
d72886b5 1708 memset (&attrs, 0, sizeof (attrs));
1709
96216d37 1710 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1711 front-end routine) and use it. */
d72886b5 1712 attrs.alias = get_alias_set (t);
c6259b83 1713
fbc6244b 1714 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
8d350e69 1715 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1716
d8dccfe9 1717 /* Default values from pre-existing memory attributes if present. */
d72886b5 1718 refattrs = MEM_ATTRS (ref);
1719 if (refattrs)
d8dccfe9 1720 {
1721 /* ??? Can this ever happen? Calling this routine on a MEM that
1722 already carries memory attributes should probably be invalid. */
d72886b5 1723 attrs.expr = refattrs->expr;
6d58bcba 1724 attrs.offset_known_p = refattrs->offset_known_p;
d72886b5 1725 attrs.offset = refattrs->offset;
6d58bcba 1726 attrs.size_known_p = refattrs->size_known_p;
d72886b5 1727 attrs.size = refattrs->size;
1728 attrs.align = refattrs->align;
d8dccfe9 1729 }
1730
1731 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1732 else
d8dccfe9 1733 {
d72886b5 1734 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1735 gcc_assert (!defattrs->expr);
6d58bcba 1736 gcc_assert (!defattrs->offset_known_p);
d72886b5 1737
d8dccfe9 1738 /* Respect mode size. */
6d58bcba 1739 attrs.size_known_p = defattrs->size_known_p;
d72886b5 1740 attrs.size = defattrs->size;
d8dccfe9 1741 /* ??? Is this really necessary? We probably should always get
1742 the size from the type below. */
1743
1744 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1745 if T is an object, always compute the object alignment below. */
d72886b5 1746 if (TYPE_P (t))
1747 attrs.align = defattrs->align;
1748 else
1749 attrs.align = BITS_PER_UNIT;
d8dccfe9 1750 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1751 e.g. if the type carries an alignment attribute. Should we be
1752 able to simply always use TYPE_ALIGN? */
1753 }
1754
a9d9ab08 1755 /* We can set the alignment from the type if we are making an object,
 1756     if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
679e0056 1757 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
d72886b5 1758 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1759
96216d37 1760 /* If the size is known, we can set that. */
50ba3acc 1761 tree new_size = TYPE_SIZE_UNIT (type);
96216d37 1762
9eec20bf 1763 /* The address-space is that of the type. */
1764 as = TYPE_ADDR_SPACE (type);
1765
579bccf9 1766 /* If T is not a type, we may be able to deduce some more information about
1767 the expression. */
1768 if (! TYPE_P (t))
2a631e19 1769 {
ae2dd339 1770 tree base;
b04fab2a 1771
2a631e19 1772 if (TREE_THIS_VOLATILE (t))
1773 MEM_VOLATILE_P (ref) = 1;
c6259b83 1774
3c00f11c 1775 /* Now remove any conversions: they don't change what the underlying
1776 object is. Likewise for SAVE_EXPR. */
72dd6141 1777 while (CONVERT_EXPR_P (t)
3c00f11c 1778 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1779 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1780 t = TREE_OPERAND (t, 0);
1781
73eb0a09 1782 /* Note whether this expression can trap. */
1783 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1784
1785 base = get_base_address (t);
3f06bd1b 1786 if (base)
1787 {
1788 if (DECL_P (base)
1789 && TREE_READONLY (base)
1790 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1791 && !TREE_THIS_VOLATILE (base))
1792 MEM_READONLY_P (ref) = 1;
1793
1794 /* Mark static const strings readonly as well. */
1795 if (TREE_CODE (base) == STRING_CST
1796 && TREE_READONLY (base)
1797 && TREE_STATIC (base))
1798 MEM_READONLY_P (ref) = 1;
1799
9eec20bf 1800 /* Address-space information is on the base object. */
3f06bd1b 1801 if (TREE_CODE (base) == MEM_REF
1802 || TREE_CODE (base) == TARGET_MEM_REF)
1803 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1804 0))));
1805 else
1806 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1807 }
cab98a0d 1808
2b02580f 1809      /* If this expression uses its parent's alias set, mark it such
1810 that we won't change it. */
d400f5e1 1811 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
5cc193e7 1812 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1813
2a631e19 1814 /* If this is a decl, set the attributes of the MEM from it. */
1815 if (DECL_P (t))
1816 {
d72886b5 1817 attrs.expr = t;
6d58bcba 1818 attrs.offset_known_p = true;
1819 attrs.offset = 0;
6f717f77 1820 apply_bitpos = bitpos;
50ba3acc 1821 new_size = DECL_SIZE_UNIT (t);
2a631e19 1822 }
1823
9eec20bf 1824 /* ??? If we end up with a constant here do record a MEM_EXPR. */
ce45a448 1825 else if (CONSTANT_CLASS_P (t))
9eec20bf 1826 ;
b10dbbca 1827
50ba3acc 1828 /* If this is a field reference, record it. */
1829 else if (TREE_CODE (t) == COMPONENT_REF)
b10dbbca 1830 {
d72886b5 1831 attrs.expr = t;
6d58bcba 1832 attrs.offset_known_p = true;
1833 attrs.offset = 0;
6f717f77 1834 apply_bitpos = bitpos;
50ba3acc 1835 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1836 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
b10dbbca 1837 }
1838
1839 /* If this is an array reference, look for an outer field reference. */
1840 else if (TREE_CODE (t) == ARRAY_REF)
1841 {
1842 tree off_tree = size_zero_node;
6b039979 1843 /* We can't modify t, because we use it at the end of the
1844 function. */
1845 tree t2 = t;
b10dbbca 1846
1847 do
1848 {
6b039979 1849 tree index = TREE_OPERAND (t2, 1);
6374121b 1850 tree low_bound = array_ref_low_bound (t2);
1851 tree unit_size = array_ref_element_size (t2);
97f8ce30 1852
1853 /* We assume all arrays have sizes that are a multiple of a byte.
1854 First subtract the lower bound, if any, in the type of the
6374121b 1855 index, then convert to sizetype and multiply by the size of
1856 the array element. */
1857 if (! integer_zerop (low_bound))
faa43f85 1858 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1859 index, low_bound);
97f8ce30 1860
6374121b 1861 off_tree = size_binop (PLUS_EXPR,
535664e3 1862 size_binop (MULT_EXPR,
1863 fold_convert (sizetype,
1864 index),
6374121b 1865 unit_size),
1866 off_tree);
6b039979 1867 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1868 }
6b039979 1869 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1870
9eec20bf 1871 if (DECL_P (t2)
1872 || TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1873 {
d72886b5 1874 attrs.expr = t2;
6d58bcba 1875 attrs.offset_known_p = false;
e913b5cd 1876 if (tree_fits_uhwi_p (off_tree))
6f717f77 1877 {
6d58bcba 1878 attrs.offset_known_p = true;
e913b5cd 1879 attrs.offset = tree_to_uhwi (off_tree);
6f717f77 1880 apply_bitpos = bitpos;
1881 }
b10dbbca 1882 }
9eec20bf 1883 /* Else do not record a MEM_EXPR. */
2d8fe5d0 1884 }
1885
6d72287b 1886 /* If this is an indirect reference, record it. */
182cf5a9 1887 else if (TREE_CODE (t) == MEM_REF
5d9de213 1888 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1889 {
d72886b5 1890 attrs.expr = t;
6d58bcba 1891 attrs.offset_known_p = true;
1892 attrs.offset = 0;
6d72287b 1893 apply_bitpos = bitpos;
1894 }
1895
9eec20bf 1896 /* Compute the alignment. */
1897 unsigned int obj_align;
1898 unsigned HOST_WIDE_INT obj_bitpos;
1899 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1900 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1901 if (obj_bitpos != 0)
1902 obj_align = (obj_bitpos & -obj_bitpos);
1903 attrs.align = MAX (attrs.align, obj_align);
2a631e19 1904 }
1905
e913b5cd 1906 if (tree_fits_uhwi_p (new_size))
50ba3acc 1907 {
1908 attrs.size_known_p = true;
e913b5cd 1909 attrs.size = tree_to_uhwi (new_size);
50ba3acc 1910 }
1911
e2e205b3 1912 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1913 bit position offset. Similarly, increase the size of the accessed
1914 object to contain the negative offset. */
6f717f77 1915 if (apply_bitpos)
595f1461 1916 {
6d58bcba 1917 gcc_assert (attrs.offset_known_p);
1918 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1919 if (attrs.size_known_p)
1920 attrs.size += apply_bitpos / BITS_PER_UNIT;
595f1461 1921 }
6f717f77 1922
2a631e19 1923 /* Now set the attributes we computed above. */
3f06bd1b 1924 attrs.addrspace = as;
d72886b5 1925 set_mem_attrs (ref, &attrs);
c6259b83 1926}
1927
6f717f77 1928void
35cb5232 1929set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1930{
1931 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1932}
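
/* Illustrative usage sketch (hypothetical helper, not part of the
   original file): the usual pairing of gen_rtx_MEM with
   set_mem_attributes when building a memory reference for a decl.
   OBJECTP is 1 because the MEM refers to the whole object.  */
#if 0
static rtx
example_mem_for_decl (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  /* Derive the alias set, alignment, size and MEM_EXPR from DECL.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif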
1933
c6259b83 1934/* Set the alias set of MEM to SET. */
1935
1936void
32c2fdea 1937set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1938{
d72886b5 1939 struct mem_attrs attrs;
1940
c6259b83 1941 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 1942 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 1943 attrs = *get_mem_attrs (mem);
1944 attrs.alias = set;
1945 set_mem_attrs (mem, &attrs);
bd1a81f7 1946}
1947
1948/* Set the address space of MEM to ADDRSPACE (target-defined). */
1949
1950void
1951set_mem_addr_space (rtx mem, addr_space_t addrspace)
1952{
d72886b5 1953 struct mem_attrs attrs;
1954
1955 attrs = *get_mem_attrs (mem);
1956 attrs.addrspace = addrspace;
1957 set_mem_attrs (mem, &attrs);
c6259b83 1958}
96216d37 1959
1c4512da 1960/* Set the alignment of MEM to ALIGN bits. */
96216d37 1961
1962void
35cb5232 1963set_mem_align (rtx mem, unsigned int align)
96216d37 1964{
d72886b5 1965 struct mem_attrs attrs;
1966
1967 attrs = *get_mem_attrs (mem);
1968 attrs.align = align;
1969 set_mem_attrs (mem, &attrs);
96216d37 1970}
278fe152 1971
b10dbbca 1972/* Set the expr for MEM to EXPR. */
278fe152 1973
1974void
35cb5232 1975set_mem_expr (rtx mem, tree expr)
278fe152 1976{
d72886b5 1977 struct mem_attrs attrs;
1978
1979 attrs = *get_mem_attrs (mem);
1980 attrs.expr = expr;
1981 set_mem_attrs (mem, &attrs);
278fe152 1982}
b10dbbca 1983
1984/* Set the offset of MEM to OFFSET. */
1985
1986void
da443c27 1987set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 1988{
d72886b5 1989 struct mem_attrs attrs;
1990
1991 attrs = *get_mem_attrs (mem);
6d58bcba 1992 attrs.offset_known_p = true;
1993 attrs.offset = offset;
da443c27 1994 set_mem_attrs (mem, &attrs);
1995}
1996
1997/* Clear the offset of MEM. */
1998
1999void
2000clear_mem_offset (rtx mem)
2001{
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
6d58bcba 2005 attrs.offset_known_p = false;
d72886b5 2006 set_mem_attrs (mem, &attrs);
f0500469 2007}
2008
2009/* Set the size of MEM to SIZE. */
2010
2011void
5b2a69fa 2012set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 2013{
d72886b5 2014 struct mem_attrs attrs;
2015
2016 attrs = *get_mem_attrs (mem);
6d58bcba 2017 attrs.size_known_p = true;
2018 attrs.size = size;
5b2a69fa 2019 set_mem_attrs (mem, &attrs);
2020}
2021
2022/* Clear the size of MEM. */
2023
2024void
2025clear_mem_size (rtx mem)
2026{
2027 struct mem_attrs attrs;
2028
2029 attrs = *get_mem_attrs (mem);
6d58bcba 2030 attrs.size_known_p = false;
d72886b5 2031 set_mem_attrs (mem, &attrs);
b10dbbca 2032}
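
/* Illustrative usage sketch (hypothetical values, not part of the
   original file): stamping attributes onto a MEM by hand when no tree
   expression is available.  Note the units: alignment is in bits,
   size in bytes.  */
#if 0
static void
example_annotate_mem (rtx mem)
{
  set_mem_align (mem, 128);			/* 128 bits = 16 bytes.  */
  set_mem_size (mem, 16);			/* 16 bytes.  */
  set_mem_addr_space (mem, ADDR_SPACE_GENERIC);
}
#endif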
c6259b83 2033\f
96216d37 2034/* Return a memory reference like MEMREF, but with its mode changed to MODE
2035 and its address changed to ADDR. (VOIDmode means don't change the mode.
2036 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2037 returned memory location is required to be valid. The memory
2038 attributes are not changed. */
15bbde2b 2039
96216d37 2040static rtx
35cb5232 2041change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 2042{
bd1a81f7 2043 addr_space_t as;
9ce37fa7 2044 rtx new_rtx;
15bbde2b 2045
611234b4 2046 gcc_assert (MEM_P (memref));
bd1a81f7 2047 as = MEM_ADDR_SPACE (memref);
15bbde2b 2048 if (mode == VOIDmode)
2049 mode = GET_MODE (memref);
2050 if (addr == 0)
2051 addr = XEXP (memref, 0);
3988ef8b 2052 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 2053 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 2054 return memref;
15bbde2b 2055
e4e86ec5 2056 if (validate)
15bbde2b 2057 {
e4e86ec5 2058 if (reload_in_progress || reload_completed)
bd1a81f7 2059 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 2060 else
bd1a81f7 2061 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 2062 }
d823ba47 2063
e8976cd7 2064 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2065 return memref;
2066
9ce37fa7 2067 new_rtx = gen_rtx_MEM (mode, addr);
2068 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2069 return new_rtx;
15bbde2b 2070}
537ffcfc 2071
96216d37 2072/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2073 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2074
2075rtx
35cb5232 2076change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 2077{
d72886b5 2078 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
9ce37fa7 2079 enum machine_mode mmode = GET_MODE (new_rtx);
d72886b5 2080 struct mem_attrs attrs, *defattrs;
0ab96142 2081
d72886b5 2082 attrs = *get_mem_attrs (memref);
2083 defattrs = mode_mem_attrs[(int) mmode];
6d58bcba 2084 attrs.expr = NULL_TREE;
2085 attrs.offset_known_p = false;
2086 attrs.size_known_p = defattrs->size_known_p;
d72886b5 2087 attrs.size = defattrs->size;
2088 attrs.align = defattrs->align;
6cc60c4d 2089
d28edf0d 2090 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2091 if (new_rtx == memref)
0ab96142 2092 {
d72886b5 2093 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2094 return new_rtx;
0ab96142 2095
9ce37fa7 2096 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2097 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2098 }
d28edf0d 2099
d72886b5 2100 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2101 return new_rtx;
e513d163 2102}
537ffcfc 2103
96216d37 2104/* Return a memory reference like MEMREF, but with its mode changed
2105 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2106 nonzero, the memory address is forced to be valid.
2d0fd66d 2107 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2108 and the caller is responsible for adjusting MEMREF base register.
2109 If ADJUST_OBJECT is zero, the underlying object associated with the
2110 memory reference is left unchanged and the caller is responsible for
2111 dealing with it. Otherwise, if the new memory reference is outside
226c6baf 2112 the underlying object, even partially, then the object is dropped.
2113 SIZE, if nonzero, is the size of an access in cases where MODE
2114 has no inherent size. */
e4e86ec5 2115
2116rtx
35cb5232 2117adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
226c6baf 2118 int validate, int adjust_address, int adjust_object,
2119 HOST_WIDE_INT size)
e4e86ec5 2120{
fb257ae6 2121 rtx addr = XEXP (memref, 0);
9ce37fa7 2122 rtx new_rtx;
d72886b5 2123 enum machine_mode address_mode;
cfb75cdf 2124 int pbits;
21b8bc7e 2125 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
d72886b5 2126 unsigned HOST_WIDE_INT max_align;
21b8bc7e 2127#ifdef POINTERS_EXTEND_UNSIGNED
2128 enum machine_mode pointer_mode
2129 = targetm.addr_space.pointer_mode (attrs.addrspace);
2130#endif
fb257ae6 2131
4733f549 2132 /* VOIDmode means no mode change for change_address_1. */
2133 if (mode == VOIDmode)
2134 mode = GET_MODE (memref);
2135
226c6baf 2136 /* Take the size of non-BLKmode accesses from the mode. */
2137 defattrs = mode_mem_attrs[(int) mode];
2138 if (defattrs->size_known_p)
2139 size = defattrs->size;
2140
d28edf0d 2141 /* If there are no changes, just return the original memory reference. */
2142 if (mode == GET_MODE (memref) && !offset
226c6baf 2143 && (size == 0 || (attrs.size_known_p && attrs.size == size))
d72886b5 2144 && (!validate || memory_address_addr_space_p (mode, addr,
2145 attrs.addrspace)))
d28edf0d 2146 return memref;
2147
e36c3d58 2148 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2149 This may happen even if offset is nonzero -- consider
e36c3d58 2150 (plus (plus reg reg) const_int) -- so do this always. */
2151 addr = copy_rtx (addr);
2152
cfb75cdf 2153 /* Convert a possibly large offset to a signed value within the
2154 range of the target address space. */
87cf5753 2155 address_mode = get_address_mode (memref);
98155838 2156 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2157 if (HOST_BITS_PER_WIDE_INT > pbits)
2158 {
2159 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2160 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2161 >> shift);
2162 }
2163
2d0fd66d 2164 if (adjust_address)
cd358719 2165 {
2166 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2167 object, we can merge it into the LO_SUM. */
2168 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2169 && offset >= 0
2170 && (unsigned HOST_WIDE_INT) offset
2171 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2172 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
29c05e22 2173 plus_constant (address_mode,
2174 XEXP (addr, 1), offset));
21b8bc7e 2175#ifdef POINTERS_EXTEND_UNSIGNED
2176 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2177 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2178 the fact that pointers are not allowed to overflow. */
2179 else if (POINTERS_EXTEND_UNSIGNED > 0
2180 && GET_CODE (addr) == ZERO_EXTEND
2181 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2182 && trunc_int_for_mode (offset, pointer_mode) == offset)
2183 addr = gen_rtx_ZERO_EXTEND (address_mode,
2184 plus_constant (pointer_mode,
2185 XEXP (addr, 0), offset));
2186#endif
cd358719 2187 else
29c05e22 2188 addr = plus_constant (address_mode, addr, offset);
cd358719 2189 }
fb257ae6 2190
9ce37fa7 2191 new_rtx = change_address_1 (memref, mode, addr, validate);
96216d37 2192
e077413c 2193 /* If the address is a REG, change_address_1 rightfully returns memref,
2194 but this would destroy memref's MEM_ATTRS. */
2195 if (new_rtx == memref && offset != 0)
2196 new_rtx = copy_rtx (new_rtx);
2197
2d0fd66d 2198 /* Conservatively drop the object if we don't know where we start from. */
2199 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2200 {
2201 attrs.expr = NULL_TREE;
2202 attrs.alias = 0;
2203 }
2204
96216d37 2205 /* Compute the new values of the memory attributes due to this adjustment.
2206 We add the offsets and update the alignment. */
6d58bcba 2207 if (attrs.offset_known_p)
2d0fd66d 2208 {
2209 attrs.offset += offset;
2210
2211 /* Drop the object if the new left end is not within its bounds. */
2212 if (adjust_object && attrs.offset < 0)
2213 {
2214 attrs.expr = NULL_TREE;
2215 attrs.alias = 0;
2216 }
2217 }
96216d37 2218
b8098e5b 2219 /* Compute the new alignment by taking the MIN of the alignment and the
2220 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2221     is zero. */
2222 if (offset != 0)
d72886b5 2223 {
2224 max_align = (offset & -offset) * BITS_PER_UNIT;
2225 attrs.align = MIN (attrs.align, max_align);
2226 }
96216d37 2227
226c6baf 2228 if (size)
6d58bcba 2229 {
2d0fd66d 2230 /* Drop the object if the new right end is not within its bounds. */
226c6baf 2231 if (adjust_object && (offset + size) > attrs.size)
2d0fd66d 2232 {
2233 attrs.expr = NULL_TREE;
2234 attrs.alias = 0;
2235 }
6d58bcba 2236 attrs.size_known_p = true;
226c6baf 2237 attrs.size = size;
6d58bcba 2238 }
2239 else if (attrs.size_known_p)
2d0fd66d 2240 {
226c6baf 2241 gcc_assert (!adjust_object);
2d0fd66d 2242 attrs.size -= offset;
226c6baf 2243 /* ??? The store_by_pieces machinery generates negative sizes,
2244 so don't assert for that here. */
2d0fd66d 2245 }
5cc193e7 2246
d72886b5 2247 set_mem_attrs (new_rtx, &attrs);
96216d37 2248
9ce37fa7 2249 return new_rtx;
e4e86ec5 2250}
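
/* Illustrative usage sketch (not part of the original file): callers
   normally reach this function through the adjust_address macro from
   rtl.h, which passes VALIDATE and ADJUST_ADDRESS as 1.  Here a DImode
   MEM is accessed as two SImode halves; the 0/4 offsets assume a
   little-endian layout.  */
#if 0
static void
example_access_halves (rtx mem)
{
  rtx lo = adjust_address (mem, SImode, 0);	/* Bytes 0..3.  */
  rtx hi = adjust_address (mem, SImode, 4);	/* Bytes 4..7.  */

  emit_move_insn (gen_reg_rtx (SImode), lo);
  emit_move_insn (gen_reg_rtx (SImode), hi);
}
#endif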
2251
bf42c62d 2252/* Return a memory reference like MEMREF, but with its mode changed
2253 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2254 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2255 nonzero, the memory address is forced to be valid. */
2256
2257rtx
35cb5232 2258adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2259 HOST_WIDE_INT offset, int validate)
bf42c62d 2260{
2261 memref = change_address_1 (memref, VOIDmode, addr, validate);
226c6baf 2262 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
bf42c62d 2263}
2264
2a631e19 2265/* Return a memory reference like MEMREF, but whose address is changed by
2266 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2267 known to be in OFFSET (possibly 1). */
fcdc122e 2268
2269rtx
35cb5232 2270offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2271{
9ce37fa7 2272 rtx new_rtx, addr = XEXP (memref, 0);
d72886b5 2273 enum machine_mode address_mode;
6d58bcba 2274 struct mem_attrs attrs, *defattrs;
fac6aae6 2275
d72886b5 2276 attrs = *get_mem_attrs (memref);
87cf5753 2277 address_mode = get_address_mode (memref);
98155838 2278 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2279
d4c5e26d 2280 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2281 could have secondary memory references, multiplies or anything.
fac6aae6 2282
2283 However, if we did go and rearrange things, we can wind up not
2284 being able to recognize the magic around pic_offset_table_rtx.
2285 This stuff is fragile, and is yet another example of why it is
2286 bad to expose PIC machinery too early. */
d72886b5 2287 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2288 attrs.addrspace)
fac6aae6 2289 && GET_CODE (addr) == PLUS
2290 && XEXP (addr, 0) == pic_offset_table_rtx)
2291 {
2292 addr = force_reg (GET_MODE (addr), addr);
98155838 2293 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2294 }
2295
9ce37fa7 2296 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2297 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
fcdc122e 2298
d28edf0d 2299 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2300 if (new_rtx == memref)
2301 return new_rtx;
d28edf0d 2302
fcdc122e 2303 /* Update the alignment to reflect the offset. Reset the offset, which
2304 we don't know. */
6d58bcba 2305 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2306 attrs.offset_known_p = false;
2307 attrs.size_known_p = defattrs->size_known_p;
2308 attrs.size = defattrs->size;
d72886b5 2309 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2310 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2311 return new_rtx;
fcdc122e 2312}
d4c5e26d 2313
537ffcfc 2314/* Return a memory reference like MEMREF, but with its address changed to
2315 ADDR. The caller is asserting that the actual piece of memory pointed
2316 to is the same, just the form of the address is being changed, such as
2317 by putting something into a register. */
2318
2319rtx
35cb5232 2320replace_equiv_address (rtx memref, rtx addr)
537ffcfc 2321{
96216d37 2322 /* change_address_1 copies the memory attribute structure without change
2323 and that's exactly what we want here. */
ecfe4ca9 2324 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2325 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2326}
96216d37 2327
e4e86ec5 2328/* Likewise, but the reference is not required to be valid. */
2329
2330rtx
35cb5232 2331replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2332{
e4e86ec5 2333 return change_address_1 (memref, VOIDmode, addr, 0);
2334}
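
/* Illustrative usage sketch (hypothetical helper, not part of the
   original file): the common idiom behind replace_equiv_address --
   the same memory is still referenced, only the form of the address
   changes, here by forcing it into a register.  */
#if 0
static rtx
example_legitimize_address (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));

  return replace_equiv_address (mem, addr);
}
#endif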
8259ab07 2335
2336/* Return a memory reference like MEMREF, but with its mode widened to
2337 MODE and offset by OFFSET. This would be used by targets that e.g.
2338 cannot issue QImode memory operations and have to use SImode memory
2339 operations plus masking logic. */
2340
2341rtx
35cb5232 2342widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2343{
226c6baf 2344 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
d72886b5 2345 struct mem_attrs attrs;
8259ab07 2346 unsigned int size = GET_MODE_SIZE (mode);
2347
d28edf0d 2348 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2349 if (new_rtx == memref)
2350 return new_rtx;
d28edf0d 2351
d72886b5 2352 attrs = *get_mem_attrs (new_rtx);
2353
8259ab07 2354 /* If we don't know what offset we were at within the expression, then
2355 we can't know if we've overstepped the bounds. */
6d58bcba 2356 if (! attrs.offset_known_p)
d72886b5 2357 attrs.expr = NULL_TREE;
8259ab07 2358
d72886b5 2359 while (attrs.expr)
8259ab07 2360 {
d72886b5 2361 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2362 {
d72886b5 2363 tree field = TREE_OPERAND (attrs.expr, 1);
2364 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2365
2366 if (! DECL_SIZE_UNIT (field))
2367 {
d72886b5 2368 attrs.expr = NULL_TREE;
8259ab07 2369 break;
2370 }
2371
2372 /* Is the field at least as large as the access? If so, ok,
2373 otherwise strip back to the containing structure. */
8359cfb4 2374 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2375 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
6d58bcba 2376 && attrs.offset >= 0)
8259ab07 2377 break;
2378
e913b5cd 2379 if (! tree_fits_uhwi_p (offset))
8259ab07 2380 {
d72886b5 2381 attrs.expr = NULL_TREE;
8259ab07 2382 break;
2383 }
2384
d72886b5 2385 attrs.expr = TREE_OPERAND (attrs.expr, 0);
e913b5cd 2386 attrs.offset += tree_to_uhwi (offset);
2387 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
6d58bcba 2388 / BITS_PER_UNIT);
8259ab07 2389 }
2390 /* Similarly for the decl. */
d72886b5 2391 else if (DECL_P (attrs.expr)
2392 && DECL_SIZE_UNIT (attrs.expr)
2393 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2394 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
6d58bcba 2395 && (! attrs.offset_known_p || attrs.offset >= 0))
8259ab07 2396 break;
2397 else
2398 {
2399 /* The widened memory access overflows the expression, which means
2400 that it could alias another expression. Zap it. */
d72886b5 2401 attrs.expr = NULL_TREE;
8259ab07 2402 break;
2403 }
2404 }
2405
d72886b5 2406 if (! attrs.expr)
6d58bcba 2407 attrs.offset_known_p = false;
8259ab07 2408
2409 /* The widened memory may alias other stuff, so zap the alias set. */
2410 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2411 attrs.alias = 0;
6d58bcba 2412 attrs.size_known_p = true;
2413 attrs.size = size;
d72886b5 2414 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2415 return new_rtx;
8259ab07 2416}
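
/* Illustrative usage sketch (hypothetical helper, not part of the
   original file): a target without byte loads reading a QImode MEM
   through an SImode access plus masking, as described above.  The zero
   offset assumes the byte sits at the start of the wider word; real
   targets would also shift for big-endian layouts.  */
#if 0
static rtx
example_widen_byte_load (rtx mem)
{
  rtx wide = widen_memory_access (mem, SImode, 0);
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, wide);
  /* Mask down to the low byte.  */
  return expand_binop (SImode, and_optab, reg, GEN_INT (0xff),
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
}
#endif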
15bbde2b 2417\f
ac681e84 2418/* A fake decl that is used as the MEM_EXPR of spill slots. */
2419static GTY(()) tree spill_slot_decl;
2420
58029e61 2421tree
2422get_spill_slot_decl (bool force_build_p)
ac681e84 2423{
2424 tree d = spill_slot_decl;
2425 rtx rd;
d72886b5 2426 struct mem_attrs attrs;
ac681e84 2427
58029e61 2428 if (d || !force_build_p)
ac681e84 2429 return d;
2430
e60a6f7b 2431 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2432 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2433 DECL_ARTIFICIAL (d) = 1;
2434 DECL_IGNORED_P (d) = 1;
2435 TREE_USED (d) = 1;
ac681e84 2436 spill_slot_decl = d;
2437
2438 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2439 MEM_NOTRAP_P (rd) = 1;
d72886b5 2440 attrs = *mode_mem_attrs[(int) BLKmode];
2441 attrs.alias = new_alias_set ();
2442 attrs.expr = d;
2443 set_mem_attrs (rd, &attrs);
ac681e84 2444 SET_DECL_RTL (d, rd);
2445
2446 return d;
2447}
2448
2449/* Given MEM, a result from assign_stack_local, fill in the memory
2450 attributes as appropriate for a register allocator spill slot.
2451 These slots are not aliasable by other memory. We arrange for
2452 them all to use a single MEM_EXPR, so that the aliasing code can
2453 work properly in the case of shared spill slots. */
2454
2455void
2456set_mem_attrs_for_spill (rtx mem)
2457{
d72886b5 2458 struct mem_attrs attrs;
2459 rtx addr;
ac681e84 2460
d72886b5 2461 attrs = *get_mem_attrs (mem);
2462 attrs.expr = get_spill_slot_decl (true);
2463 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2464 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2465
2466 /* We expect the incoming memory to be of the form:
2467 (mem:MODE (plus (reg sfp) (const_int offset)))
2468 with perhaps the plus missing for offset = 0. */
2469 addr = XEXP (mem, 0);
6d58bcba 2470 attrs.offset_known_p = true;
2471 attrs.offset = 0;
ac681e84 2472 if (GET_CODE (addr) == PLUS
971ba038 2473 && CONST_INT_P (XEXP (addr, 1)))
6d58bcba 2474 attrs.offset = INTVAL (XEXP (addr, 1));
ac681e84 2475
d72886b5 2476 set_mem_attrs (mem, &attrs);
ac681e84 2477 MEM_NOTRAP_P (mem) = 1;
2478}
2479\f
15bbde2b 2480/* Return a newly created CODE_LABEL rtx with a unique label number. */
2481
2482rtx
35cb5232 2483gen_label_rtx (void)
15bbde2b 2484{
a7ae1e59 2485 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2486 NULL, label_num++, NULL);
15bbde2b 2487}
2488\f
2489/* For procedure integration. */
2490
15bbde2b 2491/* Install new pointers to the first and last insns in the chain.
d4c332ff 2492 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2493 Used for an inline-procedure after copying the insn chain. */
2494
2495void
35cb5232 2496set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2497{
d4c332ff 2498 rtx insn;
2499
06f9d6ef 2500 set_first_insn (first);
2501 set_last_insn (last);
d4c332ff 2502 cur_insn_uid = 0;
2503
9845d120 2504 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2505 {
2506 int debug_count = 0;
2507
2508 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2509 cur_debug_insn_uid = 0;
2510
2511 for (insn = first; insn; insn = NEXT_INSN (insn))
2512 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2513 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2514 else
2515 {
2516 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2517 if (DEBUG_INSN_P (insn))
2518 debug_count++;
2519 }
2520
2521 if (debug_count)
2522 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2523 else
2524 cur_debug_insn_uid++;
2525 }
2526 else
2527 for (insn = first; insn; insn = NEXT_INSN (insn))
2528 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2529
2530 cur_insn_uid++;
15bbde2b 2531}
15bbde2b 2532\f
d823ba47 2533/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2534 structure. This routine should only be called once. */
15bbde2b 2535
a40c0eeb 2536static void
df329266 2537unshare_all_rtl_1 (rtx insn)
15bbde2b 2538{
2d96a59a 2539 /* Unshare just about everything else. */
1cd4cfea 2540 unshare_all_rtl_in_chain (insn);
d823ba47 2541
15bbde2b 2542 /* Make sure the addresses of stack slots found outside the insn chain
2543 (such as, in DECL_RTL of a variable) are not shared
2544 with the insn chain.
2545
2546 This special care is necessary when the stack slot MEM does not
2547 actually appear in the insn chain. If it does appear, its address
2548 is unshared from all else at that point. */
45733446 2549 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2550}
2551
d823ba47 2552/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2553 structure, again. This is a fairly expensive thing to do so it
2554 should be done sparingly. */
2555
2556void
35cb5232 2557unshare_all_rtl_again (rtx insn)
2d96a59a 2558{
2559 rtx p;
5244079b 2560 tree decl;
2561
2d96a59a 2562 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2563 if (INSN_P (p))
2d96a59a 2564 {
2565 reset_used_flags (PATTERN (p));
2566 reset_used_flags (REG_NOTES (p));
6d2a4bac 2567 if (CALL_P (p))
2568 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2d96a59a 2569 }
5244079b 2570
01dc9f0c 2571 /* Make sure that virtual stack slots are not shared. */
265be050 2572 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2573
5244079b 2574 /* Make sure that virtual parameters are not shared. */
1767a056 2575 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2576 set_used_flags (DECL_RTL (decl));
5244079b 2577
2578 reset_used_flags (stack_slot_list);
2579
df329266 2580 unshare_all_rtl_1 (insn);
a40c0eeb 2581}
2582
2a1990e9 2583unsigned int
a40c0eeb 2584unshare_all_rtl (void)
2585{
df329266 2586 unshare_all_rtl_1 (get_insns ());
2a1990e9 2587 return 0;
2d96a59a 2588}
2589
77fce4cd 2590
1cd4cfea 2591/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2592 Recursively does the same for subexpressions. */
2593
2594static void
2595verify_rtx_sharing (rtx orig, rtx insn)
2596{
2597 rtx x = orig;
2598 int i;
2599 enum rtx_code code;
2600 const char *format_ptr;
2601
2602 if (x == 0)
2603 return;
2604
2605 code = GET_CODE (x);
2606
2607 /* These types may be freely shared. */
2608
2609 switch (code)
2610 {
2611 case REG:
688ff29b 2612 case DEBUG_EXPR:
2613 case VALUE:
0349edce 2614 CASE_CONST_ANY:
1cd4cfea 2615 case SYMBOL_REF:
2616 case LABEL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
1a860023 2620 case RETURN:
9cb2517e 2621 case SIMPLE_RETURN:
1cd4cfea 2622 case SCRATCH:
c09425a0 2623 /* SCRATCH must be shared because they represent distinct values. */
b291008a 2624 return;
c09425a0 2625 case CLOBBER:
b291008a 2626 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2627 clobbers or clobbers of hard registers that originated as pseudos.
2628 This is needed to allow safe register renaming. */
2629 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2630 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2631 return;
2632 break;
1cd4cfea 2633
2634 case CONST:
3072d30e 2635 if (shared_const_p (orig))
1cd4cfea 2636 return;
2637 break;
2638
2639 case MEM:
2640 /* A MEM is allowed to be shared if its address is constant. */
2641 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2642 || reload_completed || reload_in_progress)
2643 return;
2644
2645 break;
2646
2647 default:
2648 break;
2649 }
2650
2651 /* This rtx may not be shared. If it has already been seen,
 2652   report the invalid sharing. */
9cee7c3f 2653#ifdef ENABLE_CHECKING
1cd4cfea 2654 if (RTX_FLAG (x, used))
2655 {
0a81f5a0 2656 error ("invalid rtl sharing found in the insn");
1cd4cfea 2657 debug_rtx (insn);
0a81f5a0 2658 error ("shared rtx");
1cd4cfea 2659 debug_rtx (x);
0a81f5a0 2660 internal_error ("internal consistency failure");
1cd4cfea 2661 }
9cee7c3f 2662#endif
2663 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2664
1cd4cfea 2665 RTX_FLAG (x, used) = 1;
2666
8b332087 2667 /* Now scan the subexpressions recursively. */
1cd4cfea 2668
2669 format_ptr = GET_RTX_FORMAT (code);
2670
2671 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2672 {
2673 switch (*format_ptr++)
2674 {
2675 case 'e':
2676 verify_rtx_sharing (XEXP (x, i), insn);
2677 break;
2678
2679 case 'E':
2680 if (XVEC (x, i) != NULL)
2681 {
2682 int j;
2683 int len = XVECLEN (x, i);
2684
2685 for (j = 0; j < len; j++)
2686 {
 9cee7c3f 2687	      /* We allow sharing of ASM_OPERANDS inside a single
2688 instruction. */
1cd4cfea 2689 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2690 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2691 == ASM_OPERANDS))
1cd4cfea 2692 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2693 else
2694 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2695 }
2696 }
2697 break;
2698 }
2699 }
2700 return;
2701}
2702
1e9af25c 2703/* Reset used-flags for INSN. */
2704
2705static void
2706reset_insn_used_flags (rtx insn)
2707{
2708 gcc_assert (INSN_P (insn));
2709 reset_used_flags (PATTERN (insn));
2710 reset_used_flags (REG_NOTES (insn));
2711 if (CALL_P (insn))
2712 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2713}
2714
7cdd84a2 2715/* Go through all the RTL insn bodies and clear all the USED bits. */
1cd4cfea 2716
7cdd84a2 2717static void
2718reset_all_used_flags (void)
1cd4cfea 2719{
2720 rtx p;
2721
2722 for (p = get_insns (); p; p = NEXT_INSN (p))
2723 if (INSN_P (p))
2724 {
1e9af25c 2725 rtx pat = PATTERN (p);
2726 if (GET_CODE (pat) != SEQUENCE)
2727 reset_insn_used_flags (p);
2728 else
764f640f 2729 {
1e9af25c 2730 gcc_assert (REG_NOTES (p) == NULL);
2731 for (int i = 0; i < XVECLEN (pat, 0); i++)
2732 reset_insn_used_flags (XVECEXP (pat, 0, i));
764f640f 2733 }
1cd4cfea 2734 }
7cdd84a2 2735}
2736
1e9af25c 2737/* Verify sharing in INSN. */
2738
2739static void
2740verify_insn_sharing (rtx insn)
2741{
2742 gcc_assert (INSN_P (insn));
2743 reset_used_flags (PATTERN (insn));
2744 reset_used_flags (REG_NOTES (insn));
2745 if (CALL_P (insn))
2746 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2747}
2748
7cdd84a2 2749/* Go through all the RTL insn bodies and check that there is no unexpected
2750 sharing in between the subexpressions. */
2751
2752DEBUG_FUNCTION void
2753verify_rtl_sharing (void)
2754{
2755 rtx p;
2756
2757 timevar_push (TV_VERIFY_RTL_SHARING);
2758
2759 reset_all_used_flags ();
1cd4cfea 2760
2761 for (p = get_insns (); p; p = NEXT_INSN (p))
2762 if (INSN_P (p))
2763 {
1e9af25c 2764 rtx pat = PATTERN (p);
2765 if (GET_CODE (pat) != SEQUENCE)
2766 verify_insn_sharing (p);
2767 else
2768 for (int i = 0; i < XVECLEN (pat, 0); i++)
2769 verify_insn_sharing (XVECEXP (pat, 0, i));
1cd4cfea 2770 }
4b366dd3 2771
7cdd84a2 2772 reset_all_used_flags ();
2773
4b366dd3 2774 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2775}
2776
2d96a59a 2777/* Go through all the RTL insn bodies and copy any invalid shared structure.
2778 Assumes the mark bits are cleared at entry. */
2779
1cd4cfea 2780void
2781unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2782{
2783 for (; insn; insn = NEXT_INSN (insn))
9204e736 2784 if (INSN_P (insn))
2d96a59a 2785 {
2786 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2787 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
6d2a4bac 2788 if (CALL_P (insn))
2789 CALL_INSN_FUNCTION_USAGE (insn)
2790 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2d96a59a 2791 }
2792}
2793
01dc9f0c 2794/* Go through all virtual stack slots of a function and mark them as
265be050 2795 shared. We never replace the DECL_RTLs themselves with a copy,
2796 but expressions mentioned into a DECL_RTL cannot be shared with
2797 expressions in the instruction stream.
2798
2799 Note that reload may convert pseudo registers into memories in-place.
2800 Pseudo registers are always shared, but MEMs never are. Thus if we
2801 reset the used flags on MEMs in the instruction stream, we must set
2802 them again on MEMs that appear in DECL_RTLs. */
2803
01dc9f0c 2804static void
265be050 2805set_used_decls (tree blk)
01dc9f0c 2806{
2807 tree t;
2808
2809 /* Mark decls. */
1767a056 2810 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2811 if (DECL_RTL_SET_P (t))
265be050 2812 set_used_flags (DECL_RTL (t));
01dc9f0c 2813
2814 /* Now process sub-blocks. */
93110716 2815 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2816 set_used_decls (t);
01dc9f0c 2817}
2818
15bbde2b 2819/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2820 Recursively does the same for subexpressions. Uses
2821 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2822
2823rtx
35cb5232 2824copy_rtx_if_shared (rtx orig)
15bbde2b 2825{
0e0727c4 2826 copy_rtx_if_shared_1 (&orig);
2827 return orig;
2828}
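
/* Illustrative usage sketch (not part of the original file): the
   canonical use of copy_rtx_if_shared, mirroring
   unshare_all_rtl_in_chain above -- rewrite an insn's pattern and
   notes so that no rtx is reachable twice.  The used-flags must have
   been reset beforehand.  */
#if 0
static void
example_unshare_insn (rtx insn)
{
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}
#endif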
2829
7ba6ce7a 2830/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2831 use. Recursively does the same for subexpressions. */
2832
0e0727c4 2833static void
2834copy_rtx_if_shared_1 (rtx *orig1)
2835{
2836 rtx x;
19cb6b50 2837 int i;
2838 enum rtx_code code;
0e0727c4 2839 rtx *last_ptr;
19cb6b50 2840 const char *format_ptr;
15bbde2b 2841 int copied = 0;
0e0727c4 2842 int length;
2843
2844 /* Repeat is used to turn tail-recursion into iteration. */
2845repeat:
2846 x = *orig1;
15bbde2b 2847
2848 if (x == 0)
0e0727c4 2849 return;
15bbde2b 2850
2851 code = GET_CODE (x);
2852
2853 /* These types may be freely shared. */
2854
2855 switch (code)
2856 {
2857 case REG:
688ff29b 2858 case DEBUG_EXPR:
2859 case VALUE:
0349edce 2860 CASE_CONST_ANY:
15bbde2b 2861 case SYMBOL_REF:
1cd4cfea 2862 case LABEL_REF:
15bbde2b 2863 case CODE_LABEL:
2864 case PC:
2865 case CC0:
e0691b9a 2866 case RETURN:
9cb2517e 2867 case SIMPLE_RETURN:
15bbde2b 2868 case SCRATCH:
a92771b8 2869 /* SCRATCH must be shared because they represent distinct values. */
0e0727c4 2870 return;
c09425a0 2871 case CLOBBER:
b291008a 2872 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2873 clobbers or clobbers of hard registers that originated as pseudos.
2874 This is needed to allow safe register renaming. */
2875 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2876 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
c09425a0 2877 return;
2878 break;
15bbde2b 2879
f63d12e3 2880 case CONST:
3072d30e 2881 if (shared_const_p (x))
0e0727c4 2882 return;
f63d12e3 2883 break;
2884
9845d120 2885 case DEBUG_INSN:
15bbde2b 2886 case INSN:
2887 case JUMP_INSN:
2888 case CALL_INSN:
2889 case NOTE:
15bbde2b 2890 case BARRIER:
2891 /* The chain of insns is not being copied. */
0e0727c4 2892 return;
15bbde2b 2893
0dbd1c74 2894 default:
2895 break;
15bbde2b 2896 }
2897
2898 /* This rtx may not be shared. If it has already been seen,
2899 replace it with a copy of itself. */
2900
7c25cb91 2901 if (RTX_FLAG (x, used))
15bbde2b 2902 {
f2d0e9f1 2903 x = shallow_copy_rtx (x);
15bbde2b 2904 copied = 1;
2905 }
7c25cb91 2906 RTX_FLAG (x, used) = 1;
15bbde2b 2907
2908 /* Now scan the subexpressions recursively.
2909 We can store any replaced subexpressions directly into X
2910 since we know X is not shared! Any vectors in X
2911 must be copied if X was copied. */
2912
2913 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2914 length = GET_RTX_LENGTH (code);
2915 last_ptr = NULL;
48e1416a 2916
0e0727c4 2917 for (i = 0; i < length; i++)
15bbde2b 2918 {
2919 switch (*format_ptr++)
2920 {
2921 case 'e':
0e0727c4 2922 if (last_ptr)
2923 copy_rtx_if_shared_1 (last_ptr);
2924 last_ptr = &XEXP (x, i);
15bbde2b 2925 break;
2926
2927 case 'E':
2928 if (XVEC (x, i) != NULL)
2929 {
19cb6b50 2930 int j;
ffe0869b 2931 int len = XVECLEN (x, i);
48e1416a 2932
8b332087 2933 /* Copy the vector iff I copied the rtx and the length
2934 is nonzero. */
ffe0869b 2935 if (copied && len > 0)
a4070a91 2936 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 2937
d632b59a 2938	    /* Call recursively on everything inside the vector. */
ffe0869b 2939 for (j = 0; j < len; j++)
0e0727c4 2940 {
2941 if (last_ptr)
2942 copy_rtx_if_shared_1 (last_ptr);
2943 last_ptr = &XVECEXP (x, i, j);
2944 }
15bbde2b 2945 }
2946 break;
2947 }
2948 }
0e0727c4 2949 *orig1 = x;
2950 if (last_ptr)
2951 {
2952 orig1 = last_ptr;
2953 goto repeat;
2954 }
2955 return;
15bbde2b 2956}
2957
709947e6 2958/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 2959
709947e6 2960static void
2961mark_used_flags (rtx x, int flag)
15bbde2b 2962{
19cb6b50 2963 int i, j;
2964 enum rtx_code code;
2965 const char *format_ptr;
0e0727c4 2966 int length;
15bbde2b 2967
0e0727c4 2968 /* Repeat is used to turn tail-recursion into iteration. */
2969repeat:
15bbde2b 2970 if (x == 0)
2971 return;
2972
2973 code = GET_CODE (x);
2974
c3418f42 2975 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2976 for them. */
2977
2978 switch (code)
2979 {
2980 case REG:
688ff29b 2981 case DEBUG_EXPR:
2982 case VALUE:
0349edce 2983 CASE_CONST_ANY:
15bbde2b 2984 case SYMBOL_REF:
2985 case CODE_LABEL:
2986 case PC:
2987 case CC0:
e0691b9a 2988 case RETURN:
9cb2517e 2989 case SIMPLE_RETURN:
15bbde2b 2990 return;
2991
9845d120 2992 case DEBUG_INSN:
15bbde2b 2993 case INSN:
2994 case JUMP_INSN:
2995 case CALL_INSN:
2996 case NOTE:
2997 case LABEL_REF:
2998 case BARRIER:
2999 /* The chain of insns is not being copied. */
3000 return;
d823ba47 3001
0dbd1c74 3002 default:
3003 break;
15bbde2b 3004 }
3005
709947e6 3006 RTX_FLAG (x, used) = flag;
15bbde2b 3007
3008 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 3009 length = GET_RTX_LENGTH (code);
48e1416a 3010
0e0727c4 3011 for (i = 0; i < length; i++)
15bbde2b 3012 {
3013 switch (*format_ptr++)
3014 {
3015 case 'e':
0e0727c4 3016 if (i == length-1)
3017 {
3018 x = XEXP (x, i);
3019 goto repeat;
3020 }
709947e6 3021 mark_used_flags (XEXP (x, i), flag);
15bbde2b 3022 break;
3023
3024 case 'E':
3025 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 3026 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 3027 break;
3028 }
3029 }
3030}
1cd4cfea 3031
709947e6 3032/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 3033 to look for shared sub-parts. */
3034
3035void
709947e6 3036reset_used_flags (rtx x)
1cd4cfea 3037{
709947e6 3038 mark_used_flags (x, 0);
3039}
1cd4cfea 3040
709947e6 3041/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3042 to look for shared sub-parts. */
1cd4cfea 3043
709947e6 3044void
3045set_used_flags (rtx x)
3046{
3047 mark_used_flags (x, 1);
1cd4cfea 3048}
15bbde2b 3049\f
3050/* Copy X if necessary so that it won't be altered by changes in OTHER.
3051 Return X or the rtx for the pseudo reg the value of X was copied into.
3052 OTHER must be valid as a SET_DEST. */
3053
3054rtx
35cb5232 3055make_safe_from (rtx x, rtx other)
15bbde2b 3056{
3057 while (1)
3058 switch (GET_CODE (other))
3059 {
3060 case SUBREG:
3061 other = SUBREG_REG (other);
3062 break;
3063 case STRICT_LOW_PART:
3064 case SIGN_EXTEND:
3065 case ZERO_EXTEND:
3066 other = XEXP (other, 0);
3067 break;
3068 default:
3069 goto done;
3070 }
3071 done:
e16ceb8e 3072 if ((MEM_P (other)
15bbde2b 3073 && ! CONSTANT_P (x)
8ad4c111 3074 && !REG_P (x)
15bbde2b 3075 && GET_CODE (x) != SUBREG)
8ad4c111 3076 || (REG_P (other)
15bbde2b 3077 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3078 || reg_mentioned_p (other, x))))
3079 {
3080 rtx temp = gen_reg_rtx (GET_MODE (x));
3081 emit_move_insn (temp, x);
3082 return temp;
3083 }
3084 return x;
3085}
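
/* Illustrative usage sketch (hypothetical helper, not part of the
   original file): X is needed after a store into OTHER; make_safe_from
   hands back a fresh pseudo holding X's value if the store could
   change X, and X itself otherwise.  */
#if 0
static rtx
example_clobbering_store (rtx x, rtx other)
{
  x = make_safe_from (x, other);
  emit_move_insn (other, const0_rtx);
  /* X still evaluates to its original value here.  */
  return x;
}
#endif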
3086\f
3087/* Emission of insns (adding them to the doubly-linked list). */
3088
15bbde2b 3089/* Return the last insn emitted, even if it is in a sequence now pushed. */
3090
3091rtx
35cb5232 3092get_last_insn_anywhere (void)
15bbde2b 3093{
3094 struct sequence_stack *stack;
06f9d6ef 3095 if (get_last_insn ())
3096 return get_last_insn ();
0a893c29 3097 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 3098 if (stack->last != 0)
3099 return stack->last;
3100 return 0;
3101}
3102
70545de4 3103/* Return the first nonnote insn emitted in the current sequence or current
3104 function. This routine looks inside SEQUENCEs. */
3105
3106rtx
35cb5232 3107get_first_nonnote_insn (void)
70545de4 3108{
06f9d6ef 3109 rtx insn = get_insns ();
f86e856e 3110
3111 if (insn)
3112 {
3113 if (NOTE_P (insn))
3114 for (insn = next_insn (insn);
3115 insn && NOTE_P (insn);
3116 insn = next_insn (insn))
3117 continue;
3118 else
3119 {
1c14a50e 3120 if (NONJUMP_INSN_P (insn)
f86e856e 3121 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3122 insn = XVECEXP (PATTERN (insn), 0, 0);
3123 }
3124 }
70545de4 3125
3126 return insn;
3127}
3128
 3129/* Return the last nonnote insn emitted in the current sequence or current
3130 function. This routine looks inside SEQUENCEs. */
3131
3132rtx
35cb5232 3133get_last_nonnote_insn (void)
70545de4 3134{
06f9d6ef 3135 rtx insn = get_last_insn ();
f86e856e 3136
3137 if (insn)
3138 {
3139 if (NOTE_P (insn))
3140 for (insn = previous_insn (insn);
3141 insn && NOTE_P (insn);
3142 insn = previous_insn (insn))
3143 continue;
3144 else
3145 {
1c14a50e 3146 if (NONJUMP_INSN_P (insn)
f86e856e 3147 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3148 insn = XVECEXP (PATTERN (insn), 0,
3149 XVECLEN (PATTERN (insn), 0) - 1);
3150 }
3151 }
70545de4 3152
3153 return insn;
3154}
3155
9845d120 3156/* Return the number of actual (non-debug) insns emitted in this
3157 function. */
3158
3159int
3160get_max_insn_count (void)
3161{
3162 int n = cur_insn_uid;
3163
3164 /* The table size must be stable across -g, to avoid codegen
3165 differences due to debug insns, and not be affected by
3166 -fmin-insn-uid, to avoid excessive table size and to simplify
3167 debugging of -fcompare-debug failures. */
3168 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3169 n -= cur_debug_insn_uid;
3170 else
3171 n -= MIN_NONDEBUG_INSN_UID;
3172
3173 return n;
3174}
3175
15bbde2b 3176\f
3177/* Return the next insn. If it is a SEQUENCE, return the first insn
3178 of the sequence. */
3179
3180rtx
35cb5232 3181next_insn (rtx insn)
15bbde2b 3182{
ce4469fa 3183 if (insn)
3184 {
3185 insn = NEXT_INSN (insn);
3186 if (insn && NONJUMP_INSN_P (insn)
3187 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3188 insn = XVECEXP (PATTERN (insn), 0, 0);
3189 }
15bbde2b 3190
ce4469fa 3191 return insn;
15bbde2b 3192}
3193
3194/* Return the previous insn. If it is a SEQUENCE, return the last insn
3195 of the sequence. */
3196
3197rtx
35cb5232 3198previous_insn (rtx insn)
15bbde2b 3199{
ce4469fa 3200 if (insn)
3201 {
3202 insn = PREV_INSN (insn);
3203 if (insn && NONJUMP_INSN_P (insn)
3204 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3205 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3206 }
15bbde2b 3207
ce4469fa 3208 return insn;
15bbde2b 3209}
3210
3211/* Return the next insn after INSN that is not a NOTE. This routine does not
3212 look inside SEQUENCEs. */
3213
3214rtx
35cb5232 3215next_nonnote_insn (rtx insn)
15bbde2b 3216{
ce4469fa 3217 while (insn)
3218 {
3219 insn = NEXT_INSN (insn);
3220 if (insn == 0 || !NOTE_P (insn))
3221 break;
3222 }
15bbde2b 3223
ce4469fa 3224 return insn;
15bbde2b 3225}
3226
c4d13c5c 3227/* Return the next insn after INSN that is not a NOTE, but stop the
3228 search before we enter another basic block. This routine does not
3229 look inside SEQUENCEs. */
3230
3231rtx
3232next_nonnote_insn_bb (rtx insn)
3233{
3234 while (insn)
3235 {
3236 insn = NEXT_INSN (insn);
3237 if (insn == 0 || !NOTE_P (insn))
3238 break;
3239 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3240 return NULL_RTX;
3241 }
3242
3243 return insn;
3244}
3245
15bbde2b 3246/* Return the previous insn before INSN that is not a NOTE. This routine does
3247 not look inside SEQUENCEs. */
3248
3249rtx
35cb5232 3250prev_nonnote_insn (rtx insn)
15bbde2b 3251{
ce4469fa 3252 while (insn)
3253 {
3254 insn = PREV_INSN (insn);
3255 if (insn == 0 || !NOTE_P (insn))
3256 break;
3257 }
15bbde2b 3258
ce4469fa 3259 return insn;
15bbde2b 3260}
3261
bcc66782 3262/* Return the previous insn before INSN that is not a NOTE, but stop
3263 the search before we enter another basic block. This routine does
3264 not look inside SEQUENCEs. */
3265
3266rtx
3267prev_nonnote_insn_bb (rtx insn)
3268{
3269 while (insn)
3270 {
3271 insn = PREV_INSN (insn);
3272 if (insn == 0 || !NOTE_P (insn))
3273 break;
3274 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3275 return NULL_RTX;
3276 }
3277
3278 return insn;
3279}
3280
9845d120 3281/* Return the next insn after INSN that is not a DEBUG_INSN. This
3282 routine does not look inside SEQUENCEs. */
3283
3284rtx
3285next_nondebug_insn (rtx insn)
3286{
3287 while (insn)
3288 {
3289 insn = NEXT_INSN (insn);
3290 if (insn == 0 || !DEBUG_INSN_P (insn))
3291 break;
3292 }
3293
3294 return insn;
3295}
3296
3297/* Return the previous insn before INSN that is not a DEBUG_INSN.
3298 This routine does not look inside SEQUENCEs. */
3299
3300rtx
3301prev_nondebug_insn (rtx insn)
3302{
3303 while (insn)
3304 {
3305 insn = PREV_INSN (insn);
3306 if (insn == 0 || !DEBUG_INSN_P (insn))
3307 break;
3308 }
3309
3310 return insn;
3311}
3312
5b8537a8 3313/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3314 This routine does not look inside SEQUENCEs. */
3315
3316rtx
3317next_nonnote_nondebug_insn (rtx insn)
3318{
3319 while (insn)
3320 {
3321 insn = NEXT_INSN (insn);
3322 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3323 break;
3324 }
3325
3326 return insn;
3327}
3328
3329/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3330 This routine does not look inside SEQUENCEs. */
3331
3332rtx
3333prev_nonnote_nondebug_insn (rtx insn)
3334{
3335 while (insn)
3336 {
3337 insn = PREV_INSN (insn);
3338 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3339 break;
3340 }
3341
3342 return insn;
3343}
3344
15bbde2b 3345/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3346 or 0, if there is none. This routine does not look inside
a92771b8 3347 SEQUENCEs. */
15bbde2b 3348
3349rtx
35cb5232 3350next_real_insn (rtx insn)
15bbde2b 3351{
ce4469fa 3352 while (insn)
3353 {
3354 insn = NEXT_INSN (insn);
3355 if (insn == 0 || INSN_P (insn))
3356 break;
3357 }
15bbde2b 3358
ce4469fa 3359 return insn;
15bbde2b 3360}
3361
3362/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3363 or 0, if there is none. This routine does not look inside
3364 SEQUENCEs. */
3365
3366rtx
35cb5232 3367prev_real_insn (rtx insn)
15bbde2b 3368{
ce4469fa 3369 while (insn)
3370 {
3371 insn = PREV_INSN (insn);
3372 if (insn == 0 || INSN_P (insn))
3373 break;
3374 }
15bbde2b 3375
ce4469fa 3376 return insn;
15bbde2b 3377}
3378
d5f9786f 3379/* Return the last CALL_INSN in the current list, or 0 if there is none.
3380 This routine does not look inside SEQUENCEs. */
3381
3382rtx
35cb5232 3383last_call_insn (void)
d5f9786f 3384{
3385 rtx insn;
3386
3387 for (insn = get_last_insn ();
6d7dc5b9 3388 insn && !CALL_P (insn);
d5f9786f 3389 insn = PREV_INSN (insn))
3390 ;
3391
3392 return insn;
3393}
3394
15bbde2b 3395/* Find the next insn after INSN that really does something. This routine
084950ee 3396 does not look inside SEQUENCEs. After reload this also skips over
 3397   standalone USE and CLOBBER insns. */
15bbde2b 3398
2215ca0d 3399int
52d07779 3400active_insn_p (const_rtx insn)
2215ca0d 3401{
6d7dc5b9 3402 return (CALL_P (insn) || JUMP_P (insn)
91f71fa3 3403 || JUMP_TABLE_DATA_P (insn) /* FIXME */
6d7dc5b9 3404 || (NONJUMP_INSN_P (insn)
3a66feab 3405 && (! reload_completed
3406 || (GET_CODE (PATTERN (insn)) != USE
3407 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3408}
3409
15bbde2b 3410rtx
35cb5232 3411next_active_insn (rtx insn)
15bbde2b 3412{
ce4469fa 3413 while (insn)
3414 {
3415 insn = NEXT_INSN (insn);
3416 if (insn == 0 || active_insn_p (insn))
3417 break;
3418 }
15bbde2b 3419
ce4469fa 3420 return insn;
15bbde2b 3421}
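
/* Illustrative usage sketch (not part of the original file): walking
   every active insn in the current function with the iterators above.
   The active_insn_p guard only matters for the very first insn; every
   insn produced by next_active_insn is active by construction.  The
   per-insn work shown is hypothetical.  */
#if 0
static void
example_walk_active_insns (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = next_active_insn (insn))
    if (active_insn_p (insn))
      df_insn_rescan (insn);
}
#endif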
3422
3423/* Find the last insn before INSN that really does something. This routine
084950ee 3424 does not look inside SEQUENCEs. After reload this also skips over
 3425   standalone USE and CLOBBER insns. */
15bbde2b 3426
3427rtx
35cb5232 3428prev_active_insn (rtx insn)
15bbde2b 3429{
ce4469fa 3430 while (insn)
3431 {
3432 insn = PREV_INSN (insn);
3433 if (insn == 0 || active_insn_p (insn))
3434 break;
3435 }
15bbde2b 3436
ce4469fa 3437 return insn;
15bbde2b 3438}
15bbde2b 3439\f
3440#ifdef HAVE_cc0
3441/* Return the next insn that uses CC0 after INSN, which is assumed to
3442 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3443 applied to the result of this function should yield INSN).
3444
3445 Normally, this is simply the next insn. However, if a REG_CC_USER note
3446 is present, it contains the insn that uses CC0.
3447
3448 Return 0 if we can't find the insn. */
3449
3450rtx
35cb5232 3451next_cc0_user (rtx insn)
15bbde2b 3452{
b572011e 3453 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3454
3455 if (note)
3456 return XEXP (note, 0);
3457
3458 insn = next_nonnote_insn (insn);
6d7dc5b9 3459 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3460 insn = XVECEXP (PATTERN (insn), 0, 0);
3461
9204e736 3462 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3463 return insn;
3464
3465 return 0;
3466}
3467
3468/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3469 note, it is the previous insn. */
3470
3471rtx
35cb5232 3472prev_cc0_setter (rtx insn)
15bbde2b 3473{
b572011e 3474 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3475
3476 if (note)
3477 return XEXP (note, 0);
3478
3479 insn = prev_nonnote_insn (insn);
611234b4 3480 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3481
3482 return insn;
3483}
3484#endif
344dc2fa 3485
698ff1f0 3486#ifdef AUTO_INC_DEC
3487/* Find a RTX_AUTOINC class rtx which matches DATA. */
3488
3489static int
3490find_auto_inc (rtx *xp, void *data)
3491{
3492 rtx x = *xp;
225ab426 3493 rtx reg = (rtx) data;
698ff1f0 3494
3495 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3496 return 0;
3497
3498 switch (GET_CODE (x))
3499 {
3500 case PRE_DEC:
3501 case PRE_INC:
3502 case POST_DEC:
3503 case POST_INC:
3504 case PRE_MODIFY:
3505 case POST_MODIFY:
3506 if (rtx_equal_p (reg, XEXP (x, 0)))
3507 return 1;
3508 break;
3509
3510 default:
3511 gcc_unreachable ();
3512 }
3513 return -1;
3514}
3515#endif
3516
344dc2fa 3517/* Increment the label uses for all labels present in X. */
3518
3519static void
35cb5232 3520mark_label_nuses (rtx x)
344dc2fa 3521{
19cb6b50 3522 enum rtx_code code;
3523 int i, j;
3524 const char *fmt;
344dc2fa 3525
3526 code = GET_CODE (x);
a030d4a8 3527 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3528 LABEL_NUSES (XEXP (x, 0))++;
3529
3530 fmt = GET_RTX_FORMAT (code);
3531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3532 {
3533 if (fmt[i] == 'e')
ff385626 3534 mark_label_nuses (XEXP (x, i));
344dc2fa 3535 else if (fmt[i] == 'E')
ff385626 3536 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3537 mark_label_nuses (XVECEXP (x, i, j));
3538 }
3539}
3540
15bbde2b 3541\f
3542/* Try splitting insns that can be split for better scheduling.
 3543 PAT is the pattern which might be split.
3544 TRIAL is the insn providing PAT.
6ef828f9 3545 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3546
3547 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3548 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3549 returns TRIAL. If the insn to be returned can be split, it will be. */
3550
3551rtx
35cb5232 3552try_split (rtx pat, rtx trial, int last)
15bbde2b 3553{
3554 rtx before = PREV_INSN (trial);
3555 rtx after = NEXT_INSN (trial);
15bbde2b 3556 int has_barrier = 0;
1e5b92fa 3557 rtx note, seq, tem;
3cd757b1 3558 int probability;
e13693ec 3559 rtx insn_last, insn;
3560 int njumps = 0;
3cd757b1 3561
25e880b1 3562 /* We're not good at redistributing frame information. */
3563 if (RTX_FRAME_RELATED_P (trial))
3564 return trial;
3565
3cd757b1 3566 if (any_condjump_p (trial)
3567 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
9eb946de 3568 split_branch_probability = XINT (note, 0);
3cd757b1 3569 probability = split_branch_probability;
3570
3571 seq = split_insns (pat, trial);
3572
3573 split_branch_probability = -1;
15bbde2b 3574
3575 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3576 We may need to handle this specially. */
6d7dc5b9 3577 if (after && BARRIER_P (after))
15bbde2b 3578 {
3579 has_barrier = 1;
3580 after = NEXT_INSN (after);
3581 }
3582
e13693ec 3583 if (!seq)
3584 return trial;
3585
3586 /* Avoid infinite loop if any insn of the result matches
3587 the original pattern. */
3588 insn_last = seq;
3589 while (1)
15bbde2b 3590 {
e13693ec 3591 if (INSN_P (insn_last)
3592 && rtx_equal_p (PATTERN (insn_last), pat))
3593 return trial;
3594 if (!NEXT_INSN (insn_last))
3595 break;
3596 insn_last = NEXT_INSN (insn_last);
3597 }
d823ba47 3598
3072d30e 3599 /* We will be adding the new sequence to the function. The splitters
3600 may have introduced invalid RTL sharing, so unshare the sequence now. */
3601 unshare_all_rtl_in_chain (seq);
3602
e13693ec 3603 /* Mark labels. */
3604 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3605 {
6d7dc5b9 3606 if (JUMP_P (insn))
e13693ec 3607 {
3608 mark_jump_label (PATTERN (insn), insn, 0);
3609 njumps++;
3610 if (probability != -1
3611 && any_condjump_p (insn)
3612 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3613 {
e13693ec 3614 /* We can preserve the REG_BR_PROB notes only if exactly
3615 one jump is created, otherwise the machine description
3616 is responsible for this step using
3617 split_branch_probability variable. */
611234b4 3618 gcc_assert (njumps == 1);
9eb946de 3619 add_int_reg_note (insn, REG_BR_PROB, probability);
31d3e01c 3620 }
e13693ec 3621 }
3622 }
3623
3624 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3625 in SEQ and copy any additional information across. */
6d7dc5b9 3626 if (CALL_P (trial))
e13693ec 3627 {
3628 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3629 if (CALL_P (insn))
e13693ec 3630 {
b0bd0491 3631 rtx next, *p;
3632
3633 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3634 target may have explicitly specified. */
3635 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3636 while (*p)
3637 p = &XEXP (*p, 1);
3638 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3639
3640 /* If the old call was a sibling call, the new one must
3641 be too. */
e13693ec 3642 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3643
3644 /* If the new call is the last instruction in the sequence,
3645 it will effectively replace the old call in-situ. Otherwise
3646 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3647 so that it comes immediately after the new call. */
3648 if (NEXT_INSN (insn))
47e1410d 3649 for (next = NEXT_INSN (trial);
3650 next && NOTE_P (next);
3651 next = NEXT_INSN (next))
3652 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3653 {
3654 remove_insn (next);
3655 add_insn_after (next, insn, NULL);
47e1410d 3656 break;
b0bd0491 3657 }
e13693ec 3658 }
3659 }
5262c253 3660
e13693ec 3661 /* Copy notes, particularly those related to the CFG. */
3662 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3663 {
3664 switch (REG_NOTE_KIND (note))
3665 {
3666 case REG_EH_REGION:
e38def9c 3667 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3668 break;
381eb1e7 3669
e13693ec 3670 case REG_NORETURN:
3671 case REG_SETJMP:
4c0315d0 3672 case REG_TM:
698ff1f0 3673 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3674 {
6d7dc5b9 3675 if (CALL_P (insn))
a1ddb869 3676 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3677 }
e13693ec 3678 break;
5bb27a4b 3679
e13693ec 3680 case REG_NON_LOCAL_GOTO:
aa78dca5 3681 case REG_CROSSING_JUMP:
698ff1f0 3682 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3683 {
6d7dc5b9 3684 if (JUMP_P (insn))
a1ddb869 3685 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3686 }
e13693ec 3687 break;
344dc2fa 3688
698ff1f0 3689#ifdef AUTO_INC_DEC
3690 case REG_INC:
3691 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3692 {
3693 rtx reg = XEXP (note, 0);
3694 if (!FIND_REG_INC_NOTE (insn, reg)
3695 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3696 add_reg_note (insn, REG_INC, reg);
698ff1f0 3697 }
3698 break;
3699#endif
3700
dfe00a8f 3701 case REG_ARGS_SIZE:
3702 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3703 break;
3704
e13693ec 3705 default:
3706 break;
15bbde2b 3707 }
e13693ec 3708 }
3709
 3710 /* If there are LABELS inside the split insns, increment the
3711 usage count so we don't delete the label. */
19d2fe05 3712 if (INSN_P (trial))
e13693ec 3713 {
3714 insn = insn_last;
3715 while (insn != NULL_RTX)
15bbde2b 3716 {
19d2fe05 3717 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3718 if (NONJUMP_INSN_P (insn))
e13693ec 3719 mark_label_nuses (PATTERN (insn));
15bbde2b 3720
e13693ec 3721 insn = PREV_INSN (insn);
3722 }
15bbde2b 3723 }
3724
5169661d 3725 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
e13693ec 3726
3727 delete_insn (trial);
3728 if (has_barrier)
3729 emit_barrier_after (tem);
3730
3731 /* Recursively call try_split for each new insn created; by the
3732 time control returns here that insn will be fully split, so
3733 set LAST and continue from the insn after the one returned.
3734 We can't use next_active_insn here since AFTER may be a note.
 3735 Ignore deleted insns, which can occur if not optimizing. */
3736 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3737 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3738 tem = try_split (PATTERN (tem), tem, 1);
3739
3740 /* Return either the first or the last insn, depending on which was
3741 requested. */
3742 return last
06f9d6ef 3743 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3744 : NEXT_INSN (before);
15bbde2b 3745}
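
/* A minimal usage sketch (not part of the original file) for try_split,
   mirroring how split_insn in recog.c drives it: hand the insn its own
   pattern and continue from whatever comes back, which is TRIAL itself
   when no split was possible. The wrapper name is hypothetical. */
#if 0 /* Illustrative only. */
static rtx
split_if_possible (rtx insn)
{
  /* LAST == 1: return the last insn of any replacement sequence. */
  return try_split (PATTERN (insn), insn, 1);
}
#endif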
3746\f
3747/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3748 Store PATTERN in the pattern slot. */
15bbde2b 3749
3750rtx
35cb5232 3751make_insn_raw (rtx pattern)
15bbde2b 3752{
19cb6b50 3753 rtx insn;
15bbde2b 3754
d7c47c0e 3755 insn = rtx_alloc (INSN);
15bbde2b 3756
575333f9 3757 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3758 PATTERN (insn) = pattern;
3759 INSN_CODE (insn) = -1;
fc92fa61 3760 REG_NOTES (insn) = NULL;
5169661d 3761 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3762 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3763
fe7f701d 3764#ifdef ENABLE_RTL_CHECKING
3765 if (insn
9204e736 3766 && INSN_P (insn)
fe7f701d 3767 && (returnjump_p (insn)
3768 || (GET_CODE (insn) == SET
3769 && SET_DEST (insn) == pc_rtx)))
3770 {
c3ceba8e 3771 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3772 debug_rtx (insn);
3773 }
3774#endif
d823ba47 3775
15bbde2b 3776 return insn;
3777}
3778
9845d120 3779/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3780
e4fdf07d 3781static rtx
9845d120 3782make_debug_insn_raw (rtx pattern)
3783{
3784 rtx insn;
3785
3786 insn = rtx_alloc (DEBUG_INSN);
3787 INSN_UID (insn) = cur_debug_insn_uid++;
3788 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3789 INSN_UID (insn) = cur_insn_uid++;
3790
3791 PATTERN (insn) = pattern;
3792 INSN_CODE (insn) = -1;
3793 REG_NOTES (insn) = NULL;
5169661d 3794 INSN_LOCATION (insn) = curr_insn_location ();
9845d120 3795 BLOCK_FOR_INSN (insn) = NULL;
3796
3797 return insn;
3798}
3799
31d3e01c 3800/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3801
e4fdf07d 3802static rtx
35cb5232 3803make_jump_insn_raw (rtx pattern)
15bbde2b 3804{
19cb6b50 3805 rtx insn;
15bbde2b 3806
6a84e367 3807 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3808 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3809
3810 PATTERN (insn) = pattern;
3811 INSN_CODE (insn) = -1;
fc92fa61 3812 REG_NOTES (insn) = NULL;
3813 JUMP_LABEL (insn) = NULL;
5169661d 3814 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3815 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3816
3817 return insn;
3818}
6e911104 3819
31d3e01c 3820/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3821
3822static rtx
35cb5232 3823make_call_insn_raw (rtx pattern)
6e911104 3824{
19cb6b50 3825 rtx insn;
6e911104 3826
3827 insn = rtx_alloc (CALL_INSN);
3828 INSN_UID (insn) = cur_insn_uid++;
3829
3830 PATTERN (insn) = pattern;
3831 INSN_CODE (insn) = -1;
6e911104 3832 REG_NOTES (insn) = NULL;
3833 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5169661d 3834 INSN_LOCATION (insn) = curr_insn_location ();
ab87d1bc 3835 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3836
3837 return insn;
3838}
35f3420b 3839
3840/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3841
3842static rtx
3843make_note_raw (enum insn_note subtype)
3844{
3845 /* Some notes are never created this way at all. These notes are
3846 only created by patching out insns. */
3847 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3848 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3849
3850 rtx note = rtx_alloc (NOTE);
3851 INSN_UID (note) = cur_insn_uid++;
3852 NOTE_KIND (note) = subtype;
3853 BLOCK_FOR_INSN (note) = NULL;
3854 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3855 return note;
3856}
15bbde2b 3857\f
35f3420b 3858/* Link INSN into the doubly-linked list between PREV and NEXT.
3859 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3860 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3861
3862static inline void
3863link_insn_into_chain (rtx insn, rtx prev, rtx next)
3864{
3865 PREV_INSN (insn) = prev;
3866 NEXT_INSN (insn) = next;
3867 if (prev != NULL)
3868 {
3869 NEXT_INSN (prev) = insn;
3870 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3871 {
3872 rtx sequence = PATTERN (prev);
3873 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3874 }
3875 }
3876 if (next != NULL)
3877 {
3878 PREV_INSN (next) = insn;
3879 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3880 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3881 }
34f5b9ac 3882
3883 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3884 {
3885 rtx sequence = PATTERN (insn);
3886 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3887 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3888 }
35f3420b 3889}
3890
15bbde2b 3891/* Add INSN to the end of the doubly-linked list.
3892 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3893
3894void
35cb5232 3895add_insn (rtx insn)
15bbde2b 3896{
35f3420b 3897 rtx prev = get_last_insn ();
3898 link_insn_into_chain (insn, prev, NULL);
06f9d6ef 3899 if (NULL == get_insns ())
3900 set_first_insn (insn);
06f9d6ef 3901 set_last_insn (insn);
15bbde2b 3902}
3903
35f3420b 3904/* Add INSN into the doubly-linked list after insn AFTER. */
15bbde2b 3905
35f3420b 3906static void
3907add_insn_after_nobb (rtx insn, rtx after)
15bbde2b 3908{
3909 rtx next = NEXT_INSN (after);
3910
611234b4 3911 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3912
35f3420b 3913 link_insn_into_chain (insn, after, next);
15bbde2b 3914
35f3420b 3915 if (next == NULL)
15bbde2b 3916 {
35f3420b 3917 if (get_last_insn () == after)
3918 set_last_insn (insn);
3919 else
3920 {
3921 struct sequence_stack *stack = seq_stack;
3922 /* Scan all pending sequences too. */
3923 for (; stack; stack = stack->next)
3924 if (after == stack->last)
3925 {
3926 stack->last = insn;
3927 break;
3928 }
3929 }
15bbde2b 3930 }
35f3420b 3931}
3932
3933/* Add INSN into the doubly-linked list before insn BEFORE. */
3934
3935static void
3936add_insn_before_nobb (rtx insn, rtx before)
3937{
3938 rtx prev = PREV_INSN (before);
3939
3940 gcc_assert (!optimize || !INSN_DELETED_P (before));
3941
3942 link_insn_into_chain (insn, prev, before);
3943
3944 if (prev == NULL)
15bbde2b 3945 {
35f3420b 3946 if (get_insns () == before)
3947 set_first_insn (insn);
3948 else
3949 {
3950 struct sequence_stack *stack = seq_stack;
3951 /* Scan all pending sequences too. */
3952 for (; stack; stack = stack->next)
3953 if (before == stack->first)
3954 {
3955 stack->first = insn;
3956 break;
3957 }
312de84d 3958
35f3420b 3959 gcc_assert (stack);
3960 }
15bbde2b 3961 }
35f3420b 3962}
3963
3964/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
 3965 If BB is NULL, an attempt is made to infer the bb from AFTER.
3966
3967 This and the next function should be the only functions called
3968 to insert an insn once delay slots have been filled since only
3969 they know how to update a SEQUENCE. */
15bbde2b 3970
35f3420b 3971void
3972add_insn_after (rtx insn, rtx after, basic_block bb)
3973{
3974 add_insn_after_nobb (insn, after);
6d7dc5b9 3975 if (!BARRIER_P (after)
3976 && !BARRIER_P (insn)
9dda7915 3977 && (bb = BLOCK_FOR_INSN (after)))
3978 {
3979 set_block_for_insn (insn, bb);
308f9b79 3980 if (INSN_P (insn))
3072d30e 3981 df_insn_rescan (insn);
9dda7915 3982      /* Should not happen, as the first insn in the BB is always
3fb1e43b 3983	 either a NOTE or a LABEL. */
5496dbfc 3984 if (BB_END (bb) == after
9dda7915 3985 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3986 && !BARRIER_P (insn)
ad4583d9 3987 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3988 BB_END (bb) = insn;
9dda7915 3989 }
15bbde2b 3990}
3991
35f3420b 3992/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
 3993 If BB is NULL, an attempt is made to infer the bb from BEFORE.
3994
3995 This and the previous function should be the only functions called
3996 to insert an insn once delay slots have been filled since only
3997 they know how to update a SEQUENCE. */
312de84d 3998
3999void
3072d30e 4000add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 4001{
35f3420b 4002 add_insn_before_nobb (insn, before);
312de84d 4003
48e1416a 4004 if (!bb
3072d30e 4005 && !BARRIER_P (before)
4006 && !BARRIER_P (insn))
4007 bb = BLOCK_FOR_INSN (before);
4008
4009 if (bb)
9dda7915 4010 {
4011 set_block_for_insn (insn, bb);
308f9b79 4012 if (INSN_P (insn))
3072d30e 4013 df_insn_rescan (insn);
611234b4 4014      /* Should not happen, as the first insn in the BB is always either a
ba821eb1 4015	 NOTE or a LABEL. */
611234b4 4016 gcc_assert (BB_HEAD (bb) != insn
4017 /* Avoid clobbering of structure when creating new BB. */
4018 || BARRIER_P (insn)
ad4583d9 4019 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 4020 }
312de84d 4021}
4022
3072d30e 4023/* Replace INSN with a deleted instruction note. */
4024
fc3d1695 4025void
4026set_insn_deleted (rtx insn)
3072d30e 4027{
91f71fa3 4028 if (INSN_P (insn))
b983ea33 4029 df_insn_delete (insn);
3072d30e 4030 PUT_CODE (insn, NOTE);
4031 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4032}
4033
4034
93ff53d3 4035/* Unlink INSN from the insn chain.
4036
4037 This function knows how to handle sequences.
4038
4039 This function does not invalidate data flow information associated with
4040 INSN (i.e. does not call df_insn_delete). That makes this function
4041 usable for only disconnecting an insn from the chain, and re-emit it
4042 elsewhere later.
4043
4044 To later insert INSN elsewhere in the insn chain via add_insn and
4045 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4046 the caller. Nullifying them here breaks many insn chain walks.
4047
4048 To really delete an insn and related DF information, use delete_insn. */
4049
7ddcf2bf 4050void
35cb5232 4051remove_insn (rtx insn)
7ddcf2bf 4052{
4053 rtx next = NEXT_INSN (insn);
4054 rtx prev = PREV_INSN (insn);
e4bf866d 4055 basic_block bb;
4056
7ddcf2bf 4057 if (prev)
4058 {
4059 NEXT_INSN (prev) = next;
6d7dc5b9 4060 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 4061 {
4062 rtx sequence = PATTERN (prev);
4063 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4064 }
4065 }
06f9d6ef 4066 else if (get_insns () == insn)
4067 {
c8f0c143 4068 if (next)
4069 PREV_INSN (next) = NULL;
06f9d6ef 4070 set_first_insn (next);
4071 }
7ddcf2bf 4072 else
4073 {
0a893c29 4074 struct sequence_stack *stack = seq_stack;
7ddcf2bf 4075 /* Scan all pending sequences too. */
4076 for (; stack; stack = stack->next)
4077 if (insn == stack->first)
4078 {
4079 stack->first = next;
4080 break;
4081 }
4082
611234b4 4083 gcc_assert (stack);
7ddcf2bf 4084 }
4085
4086 if (next)
4087 {
4088 PREV_INSN (next) = prev;
6d7dc5b9 4089 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 4090 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4091 }
06f9d6ef 4092 else if (get_last_insn () == insn)
4093 set_last_insn (prev);
7ddcf2bf 4094 else
4095 {
0a893c29 4096 struct sequence_stack *stack = seq_stack;
7ddcf2bf 4097 /* Scan all pending sequences too. */
4098 for (; stack; stack = stack->next)
4099 if (insn == stack->last)
4100 {
4101 stack->last = prev;
4102 break;
4103 }
4104
611234b4 4105 gcc_assert (stack);
7ddcf2bf 4106 }
b983ea33 4107
b983ea33 4108 /* Fix up basic block boundaries, if necessary. */
6d7dc5b9 4109 if (!BARRIER_P (insn)
e4bf866d 4110 && (bb = BLOCK_FOR_INSN (insn)))
4111 {
5496dbfc 4112 if (BB_HEAD (bb) == insn)
e4bf866d 4113 {
f4aee538 4114	  /* Never ever delete the basic block note without deleting the whole
4115 basic block. */
611234b4 4116 gcc_assert (!NOTE_P (insn));
5496dbfc 4117 BB_HEAD (bb) = next;
e4bf866d 4118 }
5496dbfc 4119 if (BB_END (bb) == insn)
4120 BB_END (bb) = prev;
e4bf866d 4121 }
7ddcf2bf 4122}
4123
d5f9786f 4124/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4125
4126void
35cb5232 4127add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 4128{
611234b4 4129 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 4130
4131 /* Put the register usage information on the CALL. If there is already
4132 some usage information, put ours at the end. */
4133 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4134 {
4135 rtx link;
4136
4137 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4138 link = XEXP (link, 1))
4139 ;
4140
4141 XEXP (link, 1) = call_fusage;
4142 }
4143 else
4144 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4145}
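
/* A minimal usage sketch (not part of the original file) for
   add_function_usage_to: callers such as expand_call accumulate
   CALL_FUSAGE as an EXPR_LIST of USE/CLOBBER expressions (e.g. via
   use_reg from expr.c) and attach the whole list once the CALL_INSN
   exists. The helper name is hypothetical; REG stands for a hard
   register carrying an argument. */
#if 0 /* Illustrative only. */
static void
record_arg_register_use (rtx call_insn, rtx reg)
{
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, reg);  /* Chain a (use REG) onto the list. */
  add_function_usage_to (call_insn, call_fusage);
}
#endif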
4146
15bbde2b 4147/* Delete all insns made since FROM.
4148 FROM becomes the new last instruction. */
4149
4150void
35cb5232 4151delete_insns_since (rtx from)
15bbde2b 4152{
4153 if (from == 0)
06f9d6ef 4154 set_first_insn (0);
15bbde2b 4155 else
4156 NEXT_INSN (from) = 0;
06f9d6ef 4157 set_last_insn (from);
15bbde2b 4158}
4159
34e2ddcd 4160/* This function is deprecated; please use sequences instead.
4161
4162 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 4163 The insns to be moved are those between FROM and TO.
4164 They are moved to a new position after the insn AFTER.
4165 AFTER must not be FROM or TO or any insn in between.
4166
4167 This function does not know about SEQUENCEs and hence should not be
4168 called after delay-slot filling has been done. */
4169
4170void
35cb5232 4171reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 4172{
7f6ca11f 4173#ifdef ENABLE_CHECKING
4174 rtx x;
4175 for (x = from; x != to; x = NEXT_INSN (x))
4176 gcc_assert (after != x);
4177 gcc_assert (after != to);
4178#endif
4179
15bbde2b 4180 /* Splice this bunch out of where it is now. */
4181 if (PREV_INSN (from))
4182 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4183 if (NEXT_INSN (to))
4184 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4185 if (get_last_insn () == to)
4186 set_last_insn (PREV_INSN (from));
4187 if (get_insns () == from)
4188 set_first_insn (NEXT_INSN (to));
15bbde2b 4189
4190 /* Make the new neighbors point to it and it to them. */
4191 if (NEXT_INSN (after))
4192 PREV_INSN (NEXT_INSN (after)) = to;
4193
4194 NEXT_INSN (to) = NEXT_INSN (after);
4195 PREV_INSN (from) = after;
4196 NEXT_INSN (after) = from;
9af5ce0c 4197 if (after == get_last_insn ())
06f9d6ef 4198 set_last_insn (to);
15bbde2b 4199}
4200
9dda7915 4201/* Same as function above, but take care to update BB boundaries. */
4202void
35cb5232 4203reorder_insns (rtx from, rtx to, rtx after)
9dda7915 4204{
4205 rtx prev = PREV_INSN (from);
4206 basic_block bb, bb2;
4207
4208 reorder_insns_nobb (from, to, after);
4209
6d7dc5b9 4210 if (!BARRIER_P (after)
9dda7915 4211 && (bb = BLOCK_FOR_INSN (after)))
4212 {
4213 rtx x;
3072d30e 4214 df_set_bb_dirty (bb);
d4c5e26d 4215
6d7dc5b9 4216 if (!BARRIER_P (from)
9dda7915 4217 && (bb2 = BLOCK_FOR_INSN (from)))
4218 {
5496dbfc 4219 if (BB_END (bb2) == to)
4220 BB_END (bb2) = prev;
3072d30e 4221 df_set_bb_dirty (bb2);
9dda7915 4222 }
4223
5496dbfc 4224 if (BB_END (bb) == after)
4225 BB_END (bb) = to;
9dda7915 4226
4227 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4228 if (!BARRIER_P (x))
a2bdd643 4229 df_insn_change_bb (x, bb);
9dda7915 4230 }
4231}
4232
15bbde2b 4233\f
31d3e01c 4234/* Emit insn(s) of given code and pattern
4235 at a specified place within the doubly-linked list.
15bbde2b 4236
31d3e01c 4237 All of the emit_foo global entry points accept an object
4238 X which is either an insn list or a PATTERN of a single
4239 instruction.
15bbde2b 4240
31d3e01c 4241 There are thus a few canonical ways to generate code and
4242 emit it at a specific place in the instruction stream. For
4243 example, consider the instruction named SPOT and the fact that
4244 we would like to emit some instructions before SPOT. We might
4245 do it like this:
15bbde2b 4246
31d3e01c 4247 start_sequence ();
4248 ... emit the new instructions ...
4249 insns_head = get_insns ();
4250 end_sequence ();
15bbde2b 4251
31d3e01c 4252 emit_insn_before (insns_head, SPOT);
15bbde2b 4253
31d3e01c 4254 It used to be common to generate SEQUENCE rtl instead, but that
4255 is a relic of the past which no longer occurs. The reason is that
 4256 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4257 generated would almost certainly die right after it was created. */
15bbde2b 4258
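/* The canonical pattern described above, spelled out as a compilable
   sketch (not part of the original file); NEW_PAT and SPOT are
   hypothetical. Any number of insns may be emitted into the sequence
   before it is spliced in ahead of SPOT. */
#if 0 /* Illustrative only. */
static rtx
emit_before_spot (rtx new_pat, rtx spot)
{
  rtx insns_head;
  start_sequence ();
  emit_insn (new_pat);          /* ... emit the new instructions ... */
  insns_head = get_insns ();    /* Must be fetched before end_sequence. */
  end_sequence ();
  return emit_insn_before (insns_head, spot);
}
#endif
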
5f7c5ddd 4259static rtx
4260emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4261 rtx (*make_raw) (rtx))
15bbde2b 4262{
19cb6b50 4263 rtx insn;
15bbde2b 4264
611234b4 4265 gcc_assert (before);
31d3e01c 4266
4267 if (x == NULL_RTX)
4268 return last;
4269
4270 switch (GET_CODE (x))
15bbde2b 4271 {
9845d120 4272 case DEBUG_INSN:
31d3e01c 4273 case INSN:
4274 case JUMP_INSN:
4275 case CALL_INSN:
4276 case CODE_LABEL:
4277 case BARRIER:
4278 case NOTE:
4279 insn = x;
4280 while (insn)
4281 {
4282 rtx next = NEXT_INSN (insn);
3072d30e 4283 add_insn_before (insn, before, bb);
31d3e01c 4284 last = insn;
4285 insn = next;
4286 }
4287 break;
4288
4289#ifdef ENABLE_RTL_CHECKING
4290 case SEQUENCE:
611234b4 4291 gcc_unreachable ();
31d3e01c 4292 break;
4293#endif
4294
4295 default:
5f7c5ddd 4296 last = (*make_raw) (x);
3072d30e 4297 add_insn_before (last, before, bb);
31d3e01c 4298 break;
15bbde2b 4299 }
4300
31d3e01c 4301 return last;
15bbde2b 4302}
4303
5f7c5ddd 4304/* Make X be output before the instruction BEFORE. */
4305
4306rtx
4307emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4308{
4309 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4310}
4311
31d3e01c 4312/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4313 and output it before the instruction BEFORE. */
4314
4315rtx
0891f67c 4316emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4317{
5f7c5ddd 4318 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4319 make_jump_insn_raw);
15bbde2b 4320}
4321
31d3e01c 4322/* Make an instruction with body X and code CALL_INSN
cd0fe062 4323 and output it before the instruction BEFORE. */
4324
4325rtx
0891f67c 4326emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4327{
5f7c5ddd 4328 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4329 make_call_insn_raw);
cd0fe062 4330}
4331
9845d120 4332/* Make an instruction with body X and code DEBUG_INSN
4333 and output it before the instruction BEFORE. */
4334
4335rtx
4336emit_debug_insn_before_noloc (rtx x, rtx before)
4337{
5f7c5ddd 4338 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4339 make_debug_insn_raw);
9845d120 4340}
4341
15bbde2b 4342/* Make an insn of code BARRIER
71caadc0 4343 and output it before the insn BEFORE. */
15bbde2b 4344
4345rtx
35cb5232 4346emit_barrier_before (rtx before)
15bbde2b 4347{
19cb6b50 4348 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4349
4350 INSN_UID (insn) = cur_insn_uid++;
4351
3072d30e 4352 add_insn_before (insn, before, NULL);
15bbde2b 4353 return insn;
4354}
4355
71caadc0 4356/* Emit the label LABEL before the insn BEFORE. */
4357
4358rtx
35cb5232 4359emit_label_before (rtx label, rtx before)
71caadc0 4360{
596ef494 4361 gcc_checking_assert (INSN_UID (label) == 0);
4362 INSN_UID (label) = cur_insn_uid++;
4363 add_insn_before (label, before, NULL);
71caadc0 4364 return label;
4365}
15bbde2b 4366\f
31d3e01c 4367/* Helper for emit_insn_after; handles lists of instructions
4368 efficiently. */
15bbde2b 4369
31d3e01c 4370static rtx
3072d30e 4371emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4372{
31d3e01c 4373 rtx last;
4374 rtx after_after;
3072d30e 4375 if (!bb && !BARRIER_P (after))
4376 bb = BLOCK_FOR_INSN (after);
15bbde2b 4377
3072d30e 4378 if (bb)
15bbde2b 4379 {
3072d30e 4380 df_set_bb_dirty (bb);
31d3e01c 4381 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4382 if (!BARRIER_P (last))
3072d30e 4383 {
4384 set_block_for_insn (last, bb);
4385 df_insn_rescan (last);
4386 }
6d7dc5b9 4387 if (!BARRIER_P (last))
3072d30e 4388 {
4389 set_block_for_insn (last, bb);
4390 df_insn_rescan (last);
4391 }
5496dbfc 4392 if (BB_END (bb) == after)
4393 BB_END (bb) = last;
15bbde2b 4394 }
4395 else
31d3e01c 4396 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4397 continue;
4398
4399 after_after = NEXT_INSN (after);
4400
4401 NEXT_INSN (after) = first;
4402 PREV_INSN (first) = after;
4403 NEXT_INSN (last) = after_after;
4404 if (after_after)
4405 PREV_INSN (after_after) = last;
4406
9af5ce0c 4407 if (after == get_last_insn ())
06f9d6ef 4408 set_last_insn (last);
e1ab7874 4409
31d3e01c 4410 return last;
4411}
4412
5f7c5ddd 4413static rtx
4414emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4415 rtx (*make_raw)(rtx))
31d3e01c 4416{
4417 rtx last = after;
4418
611234b4 4419 gcc_assert (after);
31d3e01c 4420
4421 if (x == NULL_RTX)
4422 return last;
4423
4424 switch (GET_CODE (x))
15bbde2b 4425 {
9845d120 4426 case DEBUG_INSN:
31d3e01c 4427 case INSN:
4428 case JUMP_INSN:
4429 case CALL_INSN:
4430 case CODE_LABEL:
4431 case BARRIER:
4432 case NOTE:
3072d30e 4433 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4434 break;
4435
4436#ifdef ENABLE_RTL_CHECKING
4437 case SEQUENCE:
611234b4 4438 gcc_unreachable ();
31d3e01c 4439 break;
4440#endif
4441
4442 default:
5f7c5ddd 4443 last = (*make_raw) (x);
3072d30e 4444 add_insn_after (last, after, bb);
31d3e01c 4445 break;
15bbde2b 4446 }
4447
31d3e01c 4448 return last;
15bbde2b 4449}
4450
5f7c5ddd 4451/* Make X be output after the insn AFTER and set its basic block. If
4452 BB is NULL, an attempt is made to infer the BB from AFTER. */
4453
4454rtx
4455emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4456{
4457 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4458}
4459
1bea98fb 4460
31d3e01c 4461/* Make an insn of code JUMP_INSN with body X
15bbde2b 4462 and output it after the insn AFTER. */
4463
4464rtx
0891f67c 4465emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4466{
5f7c5ddd 4467 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
31d3e01c 4468}
4469
4470/* Make an instruction with body X and code CALL_INSN
4471 and output it after the instruction AFTER. */
4472
4473rtx
0891f67c 4474emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4475{
5f7c5ddd 4476 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4477}
4478
9845d120 4479/* Make an instruction with body X and code DEBUG_INSN
4480 and output it after the instruction AFTER. */
4481
4482rtx
4483emit_debug_insn_after_noloc (rtx x, rtx after)
4484{
5f7c5ddd 4485 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4486}
4487
15bbde2b 4488/* Make an insn of code BARRIER
4489 and output it after the insn AFTER. */
4490
4491rtx
35cb5232 4492emit_barrier_after (rtx after)
15bbde2b 4493{
19cb6b50 4494 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4495
4496 INSN_UID (insn) = cur_insn_uid++;
4497
3072d30e 4498 add_insn_after (insn, after, NULL);
15bbde2b 4499 return insn;
4500}
4501
4502/* Emit the label LABEL after the insn AFTER. */
4503
4504rtx
35cb5232 4505emit_label_after (rtx label, rtx after)
15bbde2b 4506{
596ef494 4507 gcc_checking_assert (INSN_UID (label) == 0);
4508 INSN_UID (label) = cur_insn_uid++;
4509 add_insn_after (label, after, NULL);
15bbde2b 4510 return label;
4511}
35f3420b 4512\f
4513/* Notes require a bit of special handling: Some notes need to have their
4514 BLOCK_FOR_INSN set, others should never have it set, and some should
4515 have it set or clear depending on the context. */
4516
4517/* Return true iff a note of kind SUBTYPE should be emitted with routines
 4518 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4519 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4520
4521static bool
4522note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4523{
4524 switch (subtype)
4525 {
4526 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4527 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4528 return true;
4529
4530 /* Notes for var tracking and EH region markers can appear between or
4531 inside basic blocks. If the caller is emitting on the basic block
4532 boundary, do not set BLOCK_FOR_INSN on the new note. */
4533 case NOTE_INSN_VAR_LOCATION:
4534 case NOTE_INSN_CALL_ARG_LOCATION:
4535 case NOTE_INSN_EH_REGION_BEG:
4536 case NOTE_INSN_EH_REGION_END:
4537 return on_bb_boundary_p;
4538
4539 /* Otherwise, BLOCK_FOR_INSN must be set. */
4540 default:
4541 return false;
4542 }
4543}
15bbde2b 4544
4545/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4546
4547rtx
ad4583d9 4548emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4549{
35f3420b 4550 rtx note = make_note_raw (subtype);
4551 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4552 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4553
4554 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4555 add_insn_after_nobb (note, after);
4556 else
4557 add_insn_after (note, after, bb);
4558 return note;
4559}
4560
4561/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4562
4563rtx
4564emit_note_before (enum insn_note subtype, rtx before)
4565{
4566 rtx note = make_note_raw (subtype);
4567 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4568 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4569
4570 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4571 add_insn_before_nobb (note, before);
4572 else
4573 add_insn_before (note, before, bb);
15bbde2b 4574 return note;
4575}
15bbde2b 4576\f
ede4ebcb 4577/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4578 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4579
4580static rtx
4581emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4582 rtx (*make_raw) (rtx))
d321a68b 4583{
ede4ebcb 4584 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4585
0891f67c 4586 if (pattern == NULL_RTX || !loc)
ca154f3f 4587 return last;
4588
31d3e01c 4589 after = NEXT_INSN (after);
4590 while (1)
4591 {
5169661d 4592 if (active_insn_p (after) && !INSN_LOCATION (after))
4593 INSN_LOCATION (after) = loc;
31d3e01c 4594 if (after == last)
4595 break;
4596 after = NEXT_INSN (after);
4597 }
d321a68b 4598 return last;
4599}
4600
ede4ebcb 4601/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4602 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4603 any DEBUG_INSNs. */
4604
4605static rtx
4606emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4607 rtx (*make_raw) (rtx))
0891f67c 4608{
9845d120 4609 rtx prev = after;
4610
ede4ebcb 4611 if (skip_debug_insns)
4612 while (DEBUG_INSN_P (prev))
4613 prev = PREV_INSN (prev);
9845d120 4614
4615 if (INSN_P (prev))
5169661d 4616 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
ede4ebcb 4617 make_raw);
0891f67c 4618 else
ede4ebcb 4619 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4620}
4621
5169661d 4622/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
d321a68b 4623rtx
ede4ebcb 4624emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4625{
ede4ebcb 4626 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4627}
31d3e01c 4628
5169661d 4629/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
ede4ebcb 4630rtx
4631emit_insn_after (rtx pattern, rtx after)
4632{
4633 return emit_pattern_after (pattern, after, true, make_insn_raw);
4634}
ca154f3f 4635
5169661d 4636/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
ede4ebcb 4637rtx
4638emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4639{
4640 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
d321a68b 4641}
4642
5169661d 4643/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
0891f67c 4644rtx
4645emit_jump_insn_after (rtx pattern, rtx after)
4646{
ede4ebcb 4647 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
0891f67c 4648}
4649
5169661d 4650/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
d321a68b 4651rtx
35cb5232 4652emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4653{
ede4ebcb 4654 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4655}
4656
5169661d 4657/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
0891f67c 4658rtx
4659emit_call_insn_after (rtx pattern, rtx after)
4660{
ede4ebcb 4661 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4662}
4663
5169661d 4664/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
9845d120 4665rtx
4666emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4667{
ede4ebcb 4668 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4669}
4670
5169661d 4671/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
9845d120 4672rtx
4673emit_debug_insn_after (rtx pattern, rtx after)
4674{
ede4ebcb 4675 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4676}
4677
ede4ebcb 4678/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4679 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4680 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4681 CALL_INSN, etc. */
4682
4683static rtx
4684emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4685 rtx (*make_raw) (rtx))
d321a68b 4686{
4687 rtx first = PREV_INSN (before);
ede4ebcb 4688 rtx last = emit_pattern_before_noloc (pattern, before,
4689 insnp ? before : NULL_RTX,
4690 NULL, make_raw);
0891f67c 4691
4692 if (pattern == NULL_RTX || !loc)
4693 return last;
4694
4486418e 4695 if (!first)
4696 first = get_insns ();
4697 else
4698 first = NEXT_INSN (first);
0891f67c 4699 while (1)
4700 {
5169661d 4701 if (active_insn_p (first) && !INSN_LOCATION (first))
4702 INSN_LOCATION (first) = loc;
0891f67c 4703 if (first == last)
4704 break;
4705 first = NEXT_INSN (first);
4706 }
4707 return last;
4708}
4709
ede4ebcb 4710/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4711 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4712 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4713 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4714
4715static rtx
4716emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4717 bool insnp, rtx (*make_raw) (rtx))
0891f67c 4718{
9845d120 4719 rtx next = before;
4720
ede4ebcb 4721 if (skip_debug_insns)
4722 while (DEBUG_INSN_P (next))
4723 next = PREV_INSN (next);
9845d120 4724
4725 if (INSN_P (next))
5169661d 4726 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
ede4ebcb 4727 insnp, make_raw);
0891f67c 4728 else
ede4ebcb 4729 return emit_pattern_before_noloc (pattern, before,
4730 insnp ? before : NULL_RTX,
4731 NULL, make_raw);
0891f67c 4732}
4733
5169661d 4734/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
0891f67c 4735rtx
ede4ebcb 4736emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0891f67c 4737{
ede4ebcb 4738 return emit_pattern_before_setloc (pattern, before, loc, true,
4739 make_insn_raw);
4740}
0891f67c 4741
5169661d 4742/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
ede4ebcb 4743rtx
4744emit_insn_before (rtx pattern, rtx before)
4745{
4746 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4747}
0891f67c 4748
5169661d 4749/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
ede4ebcb 4750rtx
4751emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4752{
4753 return emit_pattern_before_setloc (pattern, before, loc, false,
4754 make_jump_insn_raw);
0891f67c 4755}
4756
5169661d 4757/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
0891f67c 4758rtx
4759emit_jump_insn_before (rtx pattern, rtx before)
4760{
ede4ebcb 4761 return emit_pattern_before (pattern, before, true, false,
4762 make_jump_insn_raw);
0891f67c 4763}
4764
5169661d 4765/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
0891f67c 4766rtx
4767emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4768{
ede4ebcb 4769 return emit_pattern_before_setloc (pattern, before, loc, false,
4770 make_call_insn_raw);
d321a68b 4771}
0891f67c 4772
ede4ebcb 4773/* Like emit_call_insn_before_noloc,
5169661d 4774 but set INSN_LOCATION according to BEFORE. */
0891f67c 4775rtx
4776emit_call_insn_before (rtx pattern, rtx before)
4777{
ede4ebcb 4778 return emit_pattern_before (pattern, before, true, false,
4779 make_call_insn_raw);
0891f67c 4780}
9845d120 4781
5169661d 4782/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
9845d120 4783rtx
4784emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4785{
ede4ebcb 4786 return emit_pattern_before_setloc (pattern, before, loc, false,
4787 make_debug_insn_raw);
9845d120 4788}
4789
ede4ebcb 4790/* Like emit_debug_insn_before_noloc,
5169661d 4791 but set INSN_LOCATION according to BEFORE. */
9845d120 4792rtx
4793emit_debug_insn_before (rtx pattern, rtx before)
4794{
ede4ebcb 4795 return emit_pattern_before (pattern, before, false, false,
4796 make_debug_insn_raw);
9845d120 4797}
d321a68b 4798\f
31d3e01c 4799/* Take X and emit it at the end of the doubly-linked
4800 INSN list.
15bbde2b 4801
4802 Returns the last insn emitted. */
4803
4804rtx
35cb5232 4805emit_insn (rtx x)
15bbde2b 4806{
9af5ce0c 4807 rtx last = get_last_insn ();
31d3e01c 4808 rtx insn;
15bbde2b 4809
31d3e01c 4810 if (x == NULL_RTX)
4811 return last;
15bbde2b 4812
31d3e01c 4813 switch (GET_CODE (x))
4814 {
9845d120 4815 case DEBUG_INSN:
31d3e01c 4816 case INSN:
4817 case JUMP_INSN:
4818 case CALL_INSN:
4819 case CODE_LABEL:
4820 case BARRIER:
4821 case NOTE:
4822 insn = x;
4823 while (insn)
15bbde2b 4824 {
31d3e01c 4825 rtx next = NEXT_INSN (insn);
15bbde2b 4826 add_insn (insn);
31d3e01c 4827 last = insn;
4828 insn = next;
15bbde2b 4829 }
31d3e01c 4830 break;
15bbde2b 4831
31d3e01c 4832#ifdef ENABLE_RTL_CHECKING
91f71fa3 4833 case JUMP_TABLE_DATA:
31d3e01c 4834 case SEQUENCE:
611234b4 4835 gcc_unreachable ();
31d3e01c 4836 break;
4837#endif
15bbde2b 4838
31d3e01c 4839 default:
4840 last = make_insn_raw (x);
4841 add_insn (last);
4842 break;
15bbde2b 4843 }
4844
4845 return last;
4846}
4847
9845d120 4848/* Make an insn of code DEBUG_INSN with pattern X
4849 and add it to the end of the doubly-linked list. */
4850
4851rtx
4852emit_debug_insn (rtx x)
4853{
9af5ce0c 4854 rtx last = get_last_insn ();
9845d120 4855 rtx insn;
4856
4857 if (x == NULL_RTX)
4858 return last;
4859
4860 switch (GET_CODE (x))
4861 {
4862 case DEBUG_INSN:
4863 case INSN:
4864 case JUMP_INSN:
4865 case CALL_INSN:
4866 case CODE_LABEL:
4867 case BARRIER:
4868 case NOTE:
4869 insn = x;
4870 while (insn)
4871 {
4872 rtx next = NEXT_INSN (insn);
4873 add_insn (insn);
4874 last = insn;
4875 insn = next;
4876 }
4877 break;
4878
4879#ifdef ENABLE_RTL_CHECKING
91f71fa3 4880 case JUMP_TABLE_DATA:
9845d120 4881 case SEQUENCE:
4882 gcc_unreachable ();
4883 break;
4884#endif
4885
4886 default:
4887 last = make_debug_insn_raw (x);
4888 add_insn (last);
4889 break;
4890 }
4891
4892 return last;
4893}
4894
31d3e01c 4895/* Make an insn of code JUMP_INSN with pattern X
4896 and add it to the end of the doubly-linked list. */
15bbde2b 4897
4898rtx
35cb5232 4899emit_jump_insn (rtx x)
15bbde2b 4900{
d90b3d04 4901 rtx last = NULL_RTX, insn;
15bbde2b 4902
31d3e01c 4903 switch (GET_CODE (x))
15bbde2b 4904 {
9845d120 4905 case DEBUG_INSN:
31d3e01c 4906 case INSN:
4907 case JUMP_INSN:
4908 case CALL_INSN:
4909 case CODE_LABEL:
4910 case BARRIER:
4911 case NOTE:
4912 insn = x;
4913 while (insn)
4914 {
4915 rtx next = NEXT_INSN (insn);
4916 add_insn (insn);
4917 last = insn;
4918 insn = next;
4919 }
4920 break;
b36b07d8 4921
31d3e01c 4922#ifdef ENABLE_RTL_CHECKING
91f71fa3 4923 case JUMP_TABLE_DATA:
31d3e01c 4924 case SEQUENCE:
611234b4 4925 gcc_unreachable ();
31d3e01c 4926 break;
4927#endif
b36b07d8 4928
31d3e01c 4929 default:
4930 last = make_jump_insn_raw (x);
4931 add_insn (last);
4932 break;
9dda7915 4933 }
b36b07d8 4934
4935 return last;
4936}
4937
31d3e01c 4938/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4939 and add it to the end of the doubly-linked list. */
4940
4941rtx
35cb5232 4942emit_call_insn (rtx x)
15bbde2b 4943{
31d3e01c 4944 rtx insn;
4945
4946 switch (GET_CODE (x))
15bbde2b 4947 {
9845d120 4948 case DEBUG_INSN:
31d3e01c 4949 case INSN:
4950 case JUMP_INSN:
4951 case CALL_INSN:
4952 case CODE_LABEL:
4953 case BARRIER:
4954 case NOTE:
4955 insn = emit_insn (x);
4956 break;
15bbde2b 4957
31d3e01c 4958#ifdef ENABLE_RTL_CHECKING
4959 case SEQUENCE:
91f71fa3 4960 case JUMP_TABLE_DATA:
611234b4 4961 gcc_unreachable ();
31d3e01c 4962 break;
4963#endif
15bbde2b 4964
31d3e01c 4965 default:
4966 insn = make_call_insn_raw (x);
15bbde2b 4967 add_insn (insn);
31d3e01c 4968 break;
15bbde2b 4969 }
31d3e01c 4970
4971 return insn;
15bbde2b 4972}
4973
4974/* Add the label LABEL to the end of the doubly-linked list. */
4975
4976rtx
35cb5232 4977emit_label (rtx label)
15bbde2b 4978{
596ef494 4979 gcc_checking_assert (INSN_UID (label) == 0);
4980 INSN_UID (label) = cur_insn_uid++;
4981 add_insn (label);
15bbde2b 4982 return label;
4983}
4984
91f71fa3 4985/* Make an insn of code JUMP_TABLE_DATA
4986 and add it to the end of the doubly-linked list. */
4987
4988rtx
4989emit_jump_table_data (rtx table)
4990{
4991 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4992 INSN_UID (jump_table_data) = cur_insn_uid++;
4993 PATTERN (jump_table_data) = table;
4994 BLOCK_FOR_INSN (jump_table_data) = NULL;
4995 add_insn (jump_table_data);
4996 return jump_table_data;
4997}
4998
15bbde2b 4999/* Make an insn of code BARRIER
5000 and add it to the end of the doubly-linked list. */
5001
5002rtx
35cb5232 5003emit_barrier (void)
15bbde2b 5004{
19cb6b50 5005 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 5006 INSN_UID (barrier) = cur_insn_uid++;
5007 add_insn (barrier);
5008 return barrier;
5009}
5010
2f57e3d9 5011/* Emit a copy of note ORIG. */
35cb5232 5012
2f57e3d9 5013rtx
5014emit_note_copy (rtx orig)
5015{
35f3420b 5016 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5017 rtx note = make_note_raw (kind);
2f57e3d9 5018 NOTE_DATA (note) = NOTE_DATA (orig);
2f57e3d9 5019 add_insn (note);
31b97e8f 5020 return note;
15bbde2b 5021}
5022
31b97e8f 5023/* Make an insn of code NOTE with kind KIND
5024 and add it to the end of the doubly-linked list. */
15bbde2b 5025
5026rtx
ad4583d9 5027emit_note (enum insn_note kind)
15bbde2b 5028{
35f3420b 5029 rtx note = make_note_raw (kind);
15bbde2b 5030 add_insn (note);
5031 return note;
5032}
5033
18b42941 5034/* Emit a clobber of lvalue X. */
5035
5036rtx
5037emit_clobber (rtx x)
5038{
5039 /* CONCATs should not appear in the insn stream. */
5040 if (GET_CODE (x) == CONCAT)
5041 {
5042 emit_clobber (XEXP (x, 0));
5043 return emit_clobber (XEXP (x, 1));
5044 }
5045 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5046}
5047
5048/* Return a sequence of insns to clobber lvalue X. */
5049
5050rtx
5051gen_clobber (rtx x)
5052{
5053 rtx seq;
5054
5055 start_sequence ();
5056 emit_clobber (x);
5057 seq = get_insns ();
5058 end_sequence ();
5059 return seq;
5060}
5061
5062/* Emit a use of rvalue X. */
5063
5064rtx
5065emit_use (rtx x)
5066{
5067 /* CONCATs should not appear in the insn stream. */
5068 if (GET_CODE (x) == CONCAT)
5069 {
5070 emit_use (XEXP (x, 0));
5071 return emit_use (XEXP (x, 1));
5072 }
5073 return emit_insn (gen_rtx_USE (VOIDmode, x));
5074}
5075
5076/* Return a sequence of insns to use rvalue X. */
5077
5078rtx
5079gen_use (rtx x)
5080{
5081 rtx seq;
5082
5083 start_sequence ();
5084 emit_use (x);
5085 seq = get_insns ();
5086 end_sequence ();
5087 return seq;
5088}
5089
f1934a33 5090/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5091 note of this kind already exists, its datum is replaced. */
f1934a33 5092
c080d8f0 5093rtx
35cb5232 5094set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5095{
5096 rtx note = find_reg_note (insn, kind, NULL_RTX);
5097
7e6224ab 5098 switch (kind)
5099 {
5100 case REG_EQUAL:
5101 case REG_EQUIV:
5102 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5103 has multiple sets (some callers assume single_set
5104 means the insn only has one set, when in fact it
5105 means the insn only has one * useful * set). */
5106 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5107 {
611234b4 5108 gcc_assert (!note);
7e6224ab 5109 return NULL_RTX;
5110 }
5111
5112 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5113 It serves no useful purpose and breaks eliminate_regs. */
5114 if (GET_CODE (datum) == ASM_OPERANDS)
5115 return NULL_RTX;
3072d30e 5116
5117 if (note)
5118 {
5119 XEXP (note, 0) = datum;
5120 df_notes_rescan (insn);
5121 return note;
5122 }
7e6224ab 5123 break;
5124
5125 default:
3072d30e 5126 if (note)
5127 {
5128 XEXP (note, 0) = datum;
5129 return note;
5130 }
7e6224ab 5131 break;
5132 }
c080d8f0 5133
a1ddb869 5134 add_reg_note (insn, kind, datum);
3072d30e 5135
5136 switch (kind)
c080d8f0 5137 {
3072d30e 5138 case REG_EQUAL:
5139 case REG_EQUIV:
5140 df_notes_rescan (insn);
5141 break;
5142 default:
5143 break;
c080d8f0 5144 }
f1934a33 5145
c080d8f0 5146 return REG_NOTES (insn);
f1934a33 5147}
41cf444a 5148
5149/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5150rtx
5151set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5152{
5153 rtx set = single_set (insn);
5154
5155 if (set && SET_DEST (set) == dst)
5156 return set_unique_reg_note (insn, kind, datum);
5157 return NULL_RTX;
5158}
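
/* A minimal usage sketch (not part of the original file) for the two
   helpers above: an expander that has just emitted the insn computing
   TARGET tags it with a REG_EQUAL note so later passes know the value
   it produces. The helper name, INSN, TARGET and VALUE are
   hypothetical. */
#if 0 /* Illustrative only. */
static void
note_computed_value (rtx insn, rtx target, rtx value)
{
  /* A no-op unless INSN's single_set really sets TARGET; an existing
     REG_EQUAL note has its datum replaced rather than duplicated. */
  set_dst_reg_note (insn, REG_EQUAL, copy_rtx (value), target);
}
#endif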
15bbde2b 5159\f
5160/* Return an indication of which type of insn should have X as a body.
5161 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5162
9b69f75b 5163static enum rtx_code
35cb5232 5164classify_insn (rtx x)
15bbde2b 5165{
6d7dc5b9 5166 if (LABEL_P (x))
15bbde2b 5167 return CODE_LABEL;
5168 if (GET_CODE (x) == CALL)
5169 return CALL_INSN;
9cb2517e 5170 if (ANY_RETURN_P (x))
15bbde2b 5171 return JUMP_INSN;
5172 if (GET_CODE (x) == SET)
5173 {
5174 if (SET_DEST (x) == pc_rtx)
5175 return JUMP_INSN;
5176 else if (GET_CODE (SET_SRC (x)) == CALL)
5177 return CALL_INSN;
5178 else
5179 return INSN;
5180 }
5181 if (GET_CODE (x) == PARALLEL)
5182 {
19cb6b50 5183 int j;
15bbde2b 5184 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5185 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5186 return CALL_INSN;
5187 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5188 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5189 return JUMP_INSN;
5190 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5191 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5192 return CALL_INSN;
5193 }
5194 return INSN;
5195}
5196
5197/* Emit the rtl pattern X as an appropriate kind of insn.
5198 If X is a label, it is simply added into the insn chain. */
5199
5200rtx
35cb5232 5201emit (rtx x)
15bbde2b 5202{
5203 enum rtx_code code = classify_insn (x);
5204
611234b4 5205 switch (code)
15bbde2b 5206 {
611234b4 5207 case CODE_LABEL:
5208 return emit_label (x);
5209 case INSN:
5210 return emit_insn (x);
5211 case JUMP_INSN:
5212 {
5213 rtx insn = emit_jump_insn (x);
5214 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5215 return emit_barrier ();
5216 return insn;
5217 }
5218 case CALL_INSN:
5219 return emit_call_insn (x);
9845d120 5220 case DEBUG_INSN:
5221 return emit_debug_insn (x);
611234b4 5222 default:
5223 gcc_unreachable ();
15bbde2b 5224 }
15bbde2b 5225}
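
/* A minimal sketch (not part of the original file) of emit in action:
   the insn code is inferred from the pattern itself, so a SET whose
   destination is pc_rtx is emitted as a JUMP_INSN, and (per the
   JUMP_INSN case above) an unconditional jump is followed by an
   automatic barrier. LABEL and the helper name are hypothetical. */
#if 0 /* Illustrative only. */
static void
emit_jump_to (rtx label)
{
  emit (gen_rtx_SET (VOIDmode, pc_rtx,
		     gen_rtx_LABEL_REF (VOIDmode, label)));
}
#endif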
5226\f
1f3233d1 5227/* Space for free sequence stack entries. */
7035b2ab 5228static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5229
735f4358 5230/* Begin emitting insns to a sequence. If this sequence will contain
5231 something that might cause the compiler to pop arguments to function
5232 calls (because those pops have previously been deferred; see
5233 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5234 before calling this function. That will ensure that the deferred
5235 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5236
5237void
35cb5232 5238start_sequence (void)
15bbde2b 5239{
5240 struct sequence_stack *tem;
5241
1f3233d1 5242 if (free_sequence_stack != NULL)
5243 {
5244 tem = free_sequence_stack;
5245 free_sequence_stack = tem->next;
5246 }
5247 else
ba72912a 5248 tem = ggc_alloc_sequence_stack ();
15bbde2b 5249
0a893c29 5250 tem->next = seq_stack;
06f9d6ef 5251 tem->first = get_insns ();
5252 tem->last = get_last_insn ();
15bbde2b 5253
0a893c29 5254 seq_stack = tem;
15bbde2b 5255
06f9d6ef 5256 set_first_insn (0);
5257 set_last_insn (0);
15bbde2b 5258}
5259
b49854c6 5260/* Set up the insn chain starting with FIRST as the current sequence,
5261 saving the previously current one. See the documentation for
5262 start_sequence for more information about how to use this function. */
15bbde2b 5263
5264void
35cb5232 5265push_to_sequence (rtx first)
15bbde2b 5266{
5267 rtx last;
5268
5269 start_sequence ();
5270
3c802a1e 5271 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5272 ;
15bbde2b 5273
06f9d6ef 5274 set_first_insn (first);
5275 set_last_insn (last);
15bbde2b 5276}
5277
28bf151d 5278/* Like push_to_sequence, but take the last insn as an argument to avoid
5279 looping through the list. */
5280
5281void
5282push_to_sequence2 (rtx first, rtx last)
5283{
5284 start_sequence ();
5285
06f9d6ef 5286 set_first_insn (first);
5287 set_last_insn (last);
28bf151d 5288}
5289
ab74c92f 5290/* Set up the outer-level insn chain
5291 as the current sequence, saving the previously current one. */
5292
5293void
35cb5232 5294push_topmost_sequence (void)
ab74c92f 5295{
2041cfd9 5296 struct sequence_stack *stack, *top = NULL;
ab74c92f 5297
5298 start_sequence ();
5299
0a893c29 5300 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5301 top = stack;
5302
06f9d6ef 5303 set_first_insn (top->first);
5304 set_last_insn (top->last);
ab74c92f 5305}
5306
5307/* After emitting to the outer-level insn chain, update the outer-level
5308 insn chain, and restore the previous saved state. */
5309
5310void
35cb5232 5311pop_topmost_sequence (void)
ab74c92f 5312{
2041cfd9 5313 struct sequence_stack *stack, *top = NULL;
ab74c92f 5314
0a893c29 5315 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5316 top = stack;
5317
06f9d6ef 5318 top->first = get_insns ();
5319 top->last = get_last_insn ();
ab74c92f 5320
5321 end_sequence ();
5322}
5323
15bbde2b 5324/* After emitting to a sequence, restore previous saved state.
5325
b49854c6 5326 To get the contents of the sequence just made, you must call
31d3e01c 5327 `get_insns' *before* calling here.
b49854c6 5328
5329 If the compiler might have deferred popping arguments while
5330 generating this sequence, and this sequence will not be immediately
5331 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5332 before calling get_insns. That will ensure that the deferred
b49854c6 5333 pops are inserted into this sequence, and not into some random
5334 location in the instruction stream. See INHIBIT_DEFER_POP for more
5335 information about deferred popping of arguments. */
15bbde2b 5336
5337void
35cb5232 5338end_sequence (void)
15bbde2b 5339{
0a893c29 5340 struct sequence_stack *tem = seq_stack;
15bbde2b 5341
06f9d6ef 5342 set_first_insn (tem->first);
5343 set_last_insn (tem->last);
0a893c29 5344 seq_stack = tem->next;
15bbde2b 5345
1f3233d1 5346 memset (tem, 0, sizeof (*tem));
5347 tem->next = free_sequence_stack;
5348 free_sequence_stack = tem;
15bbde2b 5349}
5350
5351/* Return 1 if currently emitting into a sequence. */
5352
5353int
35cb5232 5354in_sequence_p (void)
15bbde2b 5355{
0a893c29 5356 return seq_stack != 0;
15bbde2b 5357}
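
/* Illustrative sketch, not part of the original file: a hypothetical
   helper showing the canonical start_sequence/get_insns/end_sequence
   pairing.  The move is emitted into a detached chain that is handed
   back to the caller; note that get_insns is called *before*
   end_sequence, as the comments above require.  */

static rtx ATTRIBUTE_UNUSED
example_emit_detached_move (rtx dest, rtx src)
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, src);
  seq = get_insns ();
  end_sequence ();

  return seq;
}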
15bbde2b 5358\f
02ebfa52 5359/* Put the various virtual registers into REGNO_REG_RTX. */
5360
2f3874ce 5361static void
b079a207 5362init_virtual_regs (void)
02ebfa52 5363{
b079a207 5364 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5365 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5366 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5367 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5368 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
60778e62 5369 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5370 = virtual_preferred_stack_boundary_rtx;
0a893c29 5371}
5372
928d57e3 5373\f
5374/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5375static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5376static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5377static int copy_insn_n_scratches;
5378
5379/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5380 copied an ASM_OPERANDS.
5381 In that case, it is the original input-operand vector. */
5382static rtvec orig_asm_operands_vector;
5383
5384/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5385 copied an ASM_OPERANDS.
5386 In that case, it is the copied input-operand vector. */
5387static rtvec copy_asm_operands_vector;
5388
5389/* Likewise for the constraints vector. */
5390static rtvec orig_asm_constraints_vector;
5391static rtvec copy_asm_constraints_vector;
5392
5393/* Recursively create a new copy of an rtx for copy_insn.
5394 This function differs from copy_rtx in that it handles SCRATCHes and
5395 ASM_OPERANDs properly.
5396 Normally, this function is not used directly; use copy_insn as front end.
5397 However, you could first copy an insn pattern with copy_insn and then use
5398 this function afterwards to properly copy any REG_NOTEs containing
5399 SCRATCHes. */
5400
5401rtx
35cb5232 5402copy_insn_1 (rtx orig)
928d57e3 5403{
19cb6b50 5404 rtx copy;
5405 int i, j;
5406 RTX_CODE code;
5407 const char *format_ptr;
928d57e3 5408
25e880b1 5409 if (orig == NULL)
5410 return NULL;
5411
928d57e3 5412 code = GET_CODE (orig);
5413
5414 switch (code)
5415 {
5416 case REG:
d7fce3c8 5417 case DEBUG_EXPR:
0349edce 5418 CASE_CONST_ANY:
928d57e3 5419 case SYMBOL_REF:
5420 case CODE_LABEL:
5421 case PC:
5422 case CC0:
e0691b9a 5423 case RETURN:
9cb2517e 5424 case SIMPLE_RETURN:
928d57e3 5425 return orig;
c09425a0 5426 case CLOBBER:
b291008a 5427 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5428 clobbers or clobbers of hard registers that originated as pseudos.
5429 This is needed to allow safe register renaming. */
5430 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5431 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
c09425a0 5432 return orig;
5433 break;
928d57e3 5434
5435 case SCRATCH:
5436 for (i = 0; i < copy_insn_n_scratches; i++)
5437 if (copy_insn_scratch_in[i] == orig)
5438 return copy_insn_scratch_out[i];
5439 break;
5440
5441 case CONST:
3072d30e 5442 if (shared_const_p (orig))
928d57e3 5443 return orig;
5444 break;
d823ba47 5445
928d57e3 5446 /* A MEM with a constant address is not sharable. The problem is that
5447 the constant address may need to be reloaded. If the mem is shared,
5448 then reloading one copy of this mem will cause all copies to appear
5449 to have been reloaded. */
5450
5451 default:
5452 break;
5453 }
5454
f2d0e9f1 5455 /* Copy the various flags, fields, and other information. We assume
5456 that all fields need copying, and then clear the fields that should
928d57e3 5457 not be copied. That is the sensible default behavior, and forces
5458 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5459 copy = shallow_copy_rtx (orig);
928d57e3 5460
5461 /* We do not copy the USED flag, which is used as a mark bit during
5462 walks over the RTL. */
7c25cb91 5463 RTX_FLAG (copy, used) = 0;
928d57e3 5464
5465 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5466 if (INSN_P (orig))
928d57e3 5467 {
7c25cb91 5468 RTX_FLAG (copy, jump) = 0;
5469 RTX_FLAG (copy, call) = 0;
5470 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5471 }
d823ba47 5472
928d57e3 5473 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5474
5475 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5476 switch (*format_ptr++)
5477 {
5478 case 'e':
5479 if (XEXP (orig, i) != NULL)
5480 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5481 break;
928d57e3 5482
f2d0e9f1 5483 case 'E':
5484 case 'V':
5485 if (XVEC (orig, i) == orig_asm_constraints_vector)
5486 XVEC (copy, i) = copy_asm_constraints_vector;
5487 else if (XVEC (orig, i) == orig_asm_operands_vector)
5488 XVEC (copy, i) = copy_asm_operands_vector;
5489 else if (XVEC (orig, i) != NULL)
5490 {
5491 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5492 for (j = 0; j < XVECLEN (copy, i); j++)
5493 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5494 }
5495 break;
928d57e3 5496
f2d0e9f1 5497 case 't':
5498 case 'w':
5499 case 'i':
5500 case 's':
5501 case 'S':
5502 case 'u':
5503 case '0':
5504 /* These are left unchanged. */
5505 break;
928d57e3 5506
f2d0e9f1 5507 default:
5508 gcc_unreachable ();
5509 }
928d57e3 5510
5511 if (code == SCRATCH)
5512 {
5513 i = copy_insn_n_scratches++;
611234b4 5514 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5515 copy_insn_scratch_in[i] = orig;
5516 copy_insn_scratch_out[i] = copy;
5517 }
5518 else if (code == ASM_OPERANDS)
5519 {
d91f2122 5520 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5521 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5522 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5523 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5524 }
5525
5526 return copy;
5527}
5528
5529/* Create a new copy of an rtx.
5530 This function differs from copy_rtx in that it handles SCRATCHes and
5531 ASM_OPERANDs properly.
5532 INSN doesn't really have to be a full INSN; it could be just the
5533 pattern. */
5534rtx
35cb5232 5535copy_insn (rtx insn)
928d57e3 5536{
5537 copy_insn_n_scratches = 0;
5538 orig_asm_operands_vector = 0;
5539 orig_asm_constraints_vector = 0;
5540 copy_asm_operands_vector = 0;
5541 copy_asm_constraints_vector = 0;
5542 return copy_insn_1 (insn);
5543}
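
/* Illustrative sketch, not part of the original file: a hypothetical
   helper that copies an insn's pattern with copy_insn and then calls
   copy_insn_1 directly on a REG_EQUAL note, so that SCRATCHes in the
   note map to the same copies used in the pattern, as described in
   the comment above copy_insn_1.  */

static rtx ATTRIBUTE_UNUSED
example_copy_pattern_and_note (rtx insn, rtx *note_copy)
{
  rtx pat = copy_insn (PATTERN (insn));
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  *note_copy = note ? copy_insn_1 (XEXP (note, 0)) : NULL_RTX;
  return pat;
}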
02ebfa52 5544
a9abe1f1 5545/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5546 on the assumption that INSN itself remains in its original place. */
5547
5548rtx
5549copy_delay_slot_insn (rtx insn)
5550{
5551 /* Copy INSN with its rtx_code, all its notes, location, etc. */
5552 insn = copy_rtx (insn);
5553 INSN_UID (insn) = cur_insn_uid++;
5554 return insn;
5555}
5556
15bbde2b 5557/* Initialize data structures and variables in this file
5558 before generating rtl for each function. */
5559
5560void
35cb5232 5561init_emit (void)
15bbde2b 5562{
06f9d6ef 5563 set_first_insn (NULL);
5564 set_last_insn (NULL);
9845d120 5565 if (MIN_NONDEBUG_INSN_UID)
5566 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5567 else
5568 cur_insn_uid = 1;
5569 cur_debug_insn_uid = 1;
15bbde2b 5570 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
15bbde2b 5571 first_label_num = label_num;
0a893c29 5572 seq_stack = NULL;
15bbde2b 5573
15bbde2b 5574 /* Init the tables that describe all the pseudo regs. */
5575
fd6ffb7c 5576 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5577
fd6ffb7c 5578 crtl->emit.regno_pointer_align
2457c754 5579 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5580
ba72912a 5581 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
fcdc122e 5582
936082bb 5583 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5584 memcpy (regno_reg_rtx,
679bcc8d 5585 initial_regno_reg_rtx,
90295bd2 5586 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5587
15bbde2b 5588 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5589 init_virtual_regs ();
888e0d33 5590
5591 /* Indicate that the virtual registers and stack locations are
5592 all pointers. */
e61a0a7f 5593 REG_POINTER (stack_pointer_rtx) = 1;
5594 REG_POINTER (frame_pointer_rtx) = 1;
5595 REG_POINTER (hard_frame_pointer_rtx) = 1;
5596 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5597
e61a0a7f 5598 REG_POINTER (virtual_incoming_args_rtx) = 1;
5599 REG_POINTER (virtual_stack_vars_rtx) = 1;
5600 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5601 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5602 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5603
d4c332ff 5604#ifdef STACK_BOUNDARY
80909c64 5605 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5606 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5607 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5608 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5609
5610 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5613 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5614 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5615#endif
5616
89525da0 5617#ifdef INIT_EXPANDERS
5618 INIT_EXPANDERS;
5619#endif
15bbde2b 5620}
5621
6e68dcb2 5622/* Generate a vector constant for mode MODE and constant value CONSTANT. */
886cfd4f 5623
5624static rtx
6e68dcb2 5625gen_const_vector (enum machine_mode mode, int constant)
886cfd4f 5626{
5627 rtx tem;
5628 rtvec v;
5629 int units, i;
5630 enum machine_mode inner;
5631
5632 units = GET_MODE_NUNITS (mode);
5633 inner = GET_MODE_INNER (mode);
5634
069b07bf 5635 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5636
886cfd4f 5637 v = rtvec_alloc (units);
5638
6e68dcb2 5639 /* We need to call this function after we set the scalar const_tiny_rtx
5640 entries. */
5641 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5642
5643 for (i = 0; i < units; ++i)
6e68dcb2 5644 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5645
9426b612 5646 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5647 return tem;
5648}
5649
9426b612 5650/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6e68dcb2 5651 all elements are zero, and the one vector when all elements are one. */
9426b612 5652rtx
35cb5232 5653gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
9426b612 5654{
6e68dcb2 5655 enum machine_mode inner = GET_MODE_INNER (mode);
5656 int nunits = GET_MODE_NUNITS (mode);
5657 rtx x;
9426b612 5658 int i;
5659
6e68dcb2 5660 /* Check to see if all of the elements have the same value. */
5661 x = RTVEC_ELT (v, nunits - 1);
5662 for (i = nunits - 2; i >= 0; i--)
5663 if (RTVEC_ELT (v, i) != x)
5664 break;
5665
5666 /* If the values are all the same, check to see if we can use one of the
5667 standard constant vectors. */
5668 if (i == -1)
5669 {
5670 if (x == CONST0_RTX (inner))
5671 return CONST0_RTX (mode);
5672 else if (x == CONST1_RTX (inner))
5673 return CONST1_RTX (mode);
ba8dfb08 5674 else if (x == CONSTM1_RTX (inner))
5675 return CONSTM1_RTX (mode);
6e68dcb2 5676 }
5677
5678 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5679}
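
/* Illustrative sketch, not part of the original file, and assuming the
   target provides V4SImode: building an all-zero vector through
   gen_rtx_CONST_VECTOR yields the shared zero vector rather than a
   fresh CONST_VECTOR.  */

static rtx ATTRIBUTE_UNUSED
example_zero_vector (void)
{
  int i, nunits = GET_MODE_NUNITS (V4SImode);
  rtvec v = rtvec_alloc (nunits);

  for (i = 0; i < nunits; i++)
    RTVEC_ELT (v, i) = const0_rtx;

  /* Returns the shared CONST0_RTX (V4SImode), not a fresh vector.  */
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}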
5680
6d8b68a3 5681/* Initialize global register information required by all functions. */
5682
5683void
5684init_emit_regs (void)
5685{
5686 int i;
d83fcaa1 5687 enum machine_mode mode;
5688 mem_attrs *attrs;
6d8b68a3 5689
5690 /* Reset register attributes */
5691 htab_empty (reg_attrs_htab);
5692
5693 /* We need reg_raw_mode, so initialize the modes now. */
5694 init_reg_modes_target ();
5695
5696 /* Assign register numbers to the globally defined register rtx. */
6d8b68a3 5697 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5698 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5699 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5700 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5701 virtual_incoming_args_rtx =
5702 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5703 virtual_stack_vars_rtx =
5704 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5705 virtual_stack_dynamic_rtx =
5706 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5707 virtual_outgoing_args_rtx =
5708 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5709 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
60778e62 5710 virtual_preferred_stack_boundary_rtx =
5711 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6d8b68a3 5712
5713 /* Initialize RTL for commonly used hard registers. These are
5714 copied into regno_reg_rtx as we begin to compile each function. */
5715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
679bcc8d 5716 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6d8b68a3 5717
5718#ifdef RETURN_ADDRESS_POINTER_REGNUM
5719 return_address_pointer_rtx
5720 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5721#endif
5722
6d8b68a3 5723 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5724 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5725 else
5726 pic_offset_table_rtx = NULL_RTX;
d83fcaa1 5727
5728 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5729 {
5730 mode = (enum machine_mode) i;
5731 attrs = ggc_alloc_cleared_mem_attrs ();
5732 attrs->align = BITS_PER_UNIT;
5733 attrs->addrspace = ADDR_SPACE_GENERIC;
5734 if (mode != BLKmode)
5735 {
6d58bcba 5736 attrs->size_known_p = true;
5737 attrs->size = GET_MODE_SIZE (mode);
d83fcaa1 5738 if (STRICT_ALIGNMENT)
5739 attrs->align = GET_MODE_ALIGNMENT (mode);
5740 }
5741 mode_mem_attrs[i] = attrs;
5742 }
6d8b68a3 5743}
5744
01703575 5745/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5746
5747void
01703575 5748init_emit_once (void)
15bbde2b 5749{
5750 int i;
5751 enum machine_mode mode;
9e042f31 5752 enum machine_mode double_mode;
15bbde2b 5753
e913b5cd 5754 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5755 CONST_FIXED, and memory attribute hash tables. */
573aba85 5756 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5757 const_int_htab_eq, NULL);
c6259b83 5758
e913b5cd 5759#if TARGET_SUPPORTS_WIDE_INT
5760 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5761 const_wide_int_htab_eq, NULL);
5762#endif
573aba85 5763 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5764 const_double_htab_eq, NULL);
2ff23ed0 5765
e397ad8e 5766 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5767 const_fixed_htab_eq, NULL);
5768
573aba85 5769 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5770 mem_attrs_htab_eq, NULL);
ca74b940 5771 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5772 reg_attrs_htab_eq, NULL);
77695070 5773
71d7daa2 5774 /* Compute the byte, word and double modes. */
5775
5776 byte_mode = VOIDmode;
5777 word_mode = VOIDmode;
5778 double_mode = VOIDmode;
5779
069b07bf 5780 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5781 mode != VOIDmode;
71d7daa2 5782 mode = GET_MODE_WIDER_MODE (mode))
5783 {
5784 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5785 && byte_mode == VOIDmode)
5786 byte_mode = mode;
5787
5788 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5789 && word_mode == VOIDmode)
5790 word_mode = mode;
5791 }
5792
069b07bf 5793 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5794 mode != VOIDmode;
71d7daa2 5795 mode = GET_MODE_WIDER_MODE (mode))
5796 {
5797 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5798 && double_mode == VOIDmode)
5799 double_mode = mode;
5800 }
5801
5802 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5803
57c097d5 5804#ifdef INIT_EXPANDERS
ab5beff9 5805 /* This is to initialize {init|mark|free}_machine_status before the first
5806 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5807 end which calls push_function_context_to before the first call to
57c097d5 5808 init_function_start. */
5809 INIT_EXPANDERS;
5810#endif
5811
15bbde2b 5812 /* Create the unique rtx's for certain rtx codes and operand values. */
5813
8fd5918e 5814 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
7014838c 5815 tries to use these variables. */
15bbde2b 5816 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5817 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5818 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5819
1a60f06a 5820 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5821 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5822 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5823 else
3ad7bb1c 5824 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5825
cc69d08a 5826 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5827 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5828 real_from_integer (&dconst2, double_mode, 2, SIGNED);
3fa759a9 5829
5830 dconstm1 = dconst1;
5831 dconstm1.sign = 1;
77e89269 5832
5833 dconsthalf = dconst1;
9d96125b 5834 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5835
ba8dfb08 5836 for (i = 0; i < 3; i++)
15bbde2b 5837 {
3fa759a9 5838 const REAL_VALUE_TYPE *const r =
badfe841 5839 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5840
069b07bf 5841 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5842 mode != VOIDmode;
5843 mode = GET_MODE_WIDER_MODE (mode))
5844 const_tiny_rtx[i][(int) mode] =
5845 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5846
5847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5848 mode != VOIDmode;
15bbde2b 5849 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5850 const_tiny_rtx[i][(int) mode] =
5851 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
15bbde2b 5852
b572011e 5853 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5854
069b07bf 5855 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5856 mode != VOIDmode;
15bbde2b 5857 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5858 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5859
8c20007a 5860 for (mode = MIN_MODE_PARTIAL_INT;
5861 mode <= MAX_MODE_PARTIAL_INT;
5862 mode = (enum machine_mode)((int)(mode) + 1))
7540dcc4 5863 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5864 }
5865
ba8dfb08 5866 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5867
5868 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5869 mode != VOIDmode;
5870 mode = GET_MODE_WIDER_MODE (mode))
5871 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5872
8c20007a 5873 for (mode = MIN_MODE_PARTIAL_INT;
5874 mode <= MAX_MODE_PARTIAL_INT;
5875 mode = (enum machine_mode)((int)(mode) + 1))
dd276d20 5876 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5877
4248fc32 5878 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5879 mode != VOIDmode;
5880 mode = GET_MODE_WIDER_MODE (mode))
5881 {
5882 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5883 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5884 }
5885
5886 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5887 mode != VOIDmode;
5888 mode = GET_MODE_WIDER_MODE (mode))
5889 {
5890 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5891 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5892 }
5893
886cfd4f 5894 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5895 mode != VOIDmode;
5896 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5897 {
5898 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5899 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
ba8dfb08 5900 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6e68dcb2 5901 }
886cfd4f 5902
5903 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5904 mode != VOIDmode;
5905 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5906 {
5907 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5908 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5909 }
886cfd4f 5910
06f0b99c 5911 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5912 mode != VOIDmode;
5913 mode = GET_MODE_WIDER_MODE (mode))
5914 {
9af5ce0c 5915 FCONST0 (mode).data.high = 0;
5916 FCONST0 (mode).data.low = 0;
5917 FCONST0 (mode).mode = mode;
e397ad8e 5918 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5919 FCONST0 (mode), mode);
06f0b99c 5920 }
5921
5922 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5923 mode != VOIDmode;
5924 mode = GET_MODE_WIDER_MODE (mode))
5925 {
9af5ce0c 5926 FCONST0 (mode).data.high = 0;
5927 FCONST0 (mode).data.low = 0;
5928 FCONST0 (mode).mode = mode;
e397ad8e 5929 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5930 FCONST0 (mode), mode);
06f0b99c 5931 }
5932
5933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5934 mode != VOIDmode;
5935 mode = GET_MODE_WIDER_MODE (mode))
5936 {
9af5ce0c 5937 FCONST0 (mode).data.high = 0;
5938 FCONST0 (mode).data.low = 0;
5939 FCONST0 (mode).mode = mode;
e397ad8e 5940 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5941 FCONST0 (mode), mode);
06f0b99c 5942
5943 /* We store the value 1. */
9af5ce0c 5944 FCONST1 (mode).data.high = 0;
5945 FCONST1 (mode).data.low = 0;
5946 FCONST1 (mode).mode = mode;
5947 FCONST1 (mode).data
d67b7119 5948 = double_int_one.lshift (GET_MODE_FBIT (mode),
5949 HOST_BITS_PER_DOUBLE_INT,
5950 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5951 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5952 FCONST1 (mode), mode);
06f0b99c 5953 }
5954
5955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5956 mode != VOIDmode;
5957 mode = GET_MODE_WIDER_MODE (mode))
5958 {
9af5ce0c 5959 FCONST0 (mode).data.high = 0;
5960 FCONST0 (mode).data.low = 0;
5961 FCONST0 (mode).mode = mode;
e397ad8e 5962 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5963 FCONST0 (mode), mode);
06f0b99c 5964
5965 /* We store the value 1. */
9af5ce0c 5966 FCONST1 (mode).data.high = 0;
5967 FCONST1 (mode).data.low = 0;
5968 FCONST1 (mode).mode = mode;
5969 FCONST1 (mode).data
d67b7119 5970 = double_int_one.lshift (GET_MODE_FBIT (mode),
5971 HOST_BITS_PER_DOUBLE_INT,
5972 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5973 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5974 FCONST1 (mode), mode);
5975 }
5976
5977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5978 mode != VOIDmode;
5979 mode = GET_MODE_WIDER_MODE (mode))
5980 {
5981 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5982 }
5983
5984 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5985 mode != VOIDmode;
5986 mode = GET_MODE_WIDER_MODE (mode))
5987 {
5988 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5989 }
5990
5991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5992 mode != VOIDmode;
5993 mode = GET_MODE_WIDER_MODE (mode))
5994 {
5995 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5996 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5997 }
5998
5999 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6000 mode != VOIDmode;
6001 mode = GET_MODE_WIDER_MODE (mode))
6002 {
6003 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6004 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 6005 }
6006
0fd4500a 6007 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6008 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6009 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 6010
065336b4 6011 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6012 if (STORE_FLAG_VALUE == 1)
6013 const_tiny_rtx[1][(int) BImode] = const1_rtx;
7d7b0bac 6014
6015 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6016 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6017 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6018 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
15bbde2b 6019}
ac6c481d 6020\f
cd0fe062 6021/* Produce an exact duplicate of insn INSN after AFTER,
6022 taking care to update any libcall regions if present. */
6023
6024rtx
35cb5232 6025emit_copy_of_insn_after (rtx insn, rtx after)
cd0fe062 6026{
9ce37fa7 6027 rtx new_rtx, link;
cd0fe062 6028
6029 switch (GET_CODE (insn))
6030 {
6031 case INSN:
9ce37fa7 6032 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6033 break;
6034
6035 case JUMP_INSN:
9ce37fa7 6036 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6037 break;
6038
9845d120 6039 case DEBUG_INSN:
6040 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6041 break;
6042
cd0fe062 6043 case CALL_INSN:
9ce37fa7 6044 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 6045 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 6046 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 6047 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 6048 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6049 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6050 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
48e1416a 6051 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 6052 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 6053 break;
6054
6055 default:
611234b4 6056 gcc_unreachable ();
cd0fe062 6057 }
6058
6059 /* Update LABEL_NUSES. */
9ce37fa7 6060 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 6061
5169661d 6062 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ab87d1bc 6063
98116afd 6064 /* If the old insn is frame related, then so is the new one. This is
6065 primarily needed for IA-64 unwind info which marks epilogue insns,
6066 which may be duplicated by the basic block reordering code. */
9ce37fa7 6067 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 6068
19d2fe05 6069 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6070 will make them. REG_LABEL_TARGETs are created there too, but are
6071 supposed to be sticky, so we copy them. */
cd0fe062 6072 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 6073 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 6074 {
6075 if (GET_CODE (link) == EXPR_LIST)
9ce37fa7 6076 add_reg_note (new_rtx, REG_NOTE_KIND (link),
a1ddb869 6077 copy_insn_1 (XEXP (link, 0)));
cd0fe062 6078 else
9eb946de 6079 add_shallow_copy_of_reg_note (new_rtx, link);
cd0fe062 6080 }
6081
9ce37fa7 6082 INSN_CODE (new_rtx) = INSN_CODE (insn);
6083 return new_rtx;
cd0fe062 6084}
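
/* Illustrative sketch, not part of the original file: a hypothetical
   use of the function above, duplicating INSN directly after itself
   much as the basic block reordering code duplicates epilogue insns.
   The copy keeps the notes, location and frame-related bit of the
   original.  */

static rtx ATTRIBUTE_UNUSED
example_duplicate_insn (rtx insn)
{
  return emit_copy_of_insn_after (insn, insn);
}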
1f3233d1 6085
7035b2ab 6086static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 6087rtx
6088gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6089{
6090 if (hard_reg_clobbers[mode][regno])
6091 return hard_reg_clobbers[mode][regno];
6092 else
6093 return (hard_reg_clobbers[mode][regno] =
6094 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6095}
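
/* Illustrative sketch, not part of the original file: clobbers of the
   same hard register in the same mode are memoized above, so repeated
   requests return the identical rtx.  */

static void ATTRIBUTE_UNUSED
example_shared_clobber (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);

  gcc_assert (a == b);
}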
6096
5169661d 6097location_t prologue_location;
6098location_t epilogue_location;
23a070f3 6099
6100/* Hold the current and the last location information, so that the
6101 data structures are built lazily, only when insns at a given
6102 location are actually needed. */
c7abeac5 6103static location_t curr_location;
23a070f3 6104
5169661d 6105/* Allocate insn location datastructure. */
23a070f3 6106void
5169661d 6107insn_locations_init (void)
23a070f3 6108{
5169661d 6109 prologue_location = epilogue_location = 0;
23a070f3 6110 curr_location = UNKNOWN_LOCATION;
23a070f3 6111}
6112
6113/* At the end of the emit stage, clear the current location. */
6114void
5169661d 6115insn_locations_finalize (void)
23a070f3 6116{
5169661d 6117 epilogue_location = curr_location;
6118 curr_location = UNKNOWN_LOCATION;
23a070f3 6119}
6120
6121/* Set current location. */
6122void
5169661d 6123set_curr_insn_location (location_t location)
23a070f3 6124{
23a070f3 6125 curr_location = location;
6126}
6127
6128/* Get current location. */
6129location_t
5169661d 6130curr_insn_location (void)
23a070f3 6131{
6132 return curr_location;
6133}
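
/* Illustrative sketch, not part of the original file: an expander
   records the location of the statement being expanded before
   emitting, so that the insn-making routines can attach that location
   to every insn emitted afterwards.  */

static void ATTRIBUTE_UNUSED
example_emit_with_location (location_t loc, rtx dest, rtx src)
{
  set_curr_insn_location (loc);
  emit_move_insn (dest, src);
}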
6134
23a070f3 6135/* Return the lexical scope block that INSN belongs to. */
6136tree
6137insn_scope (const_rtx insn)
6138{
5169661d 6139 return LOCATION_BLOCK (INSN_LOCATION (insn));
23a070f3 6140}
6141
6142/* Return line number of the statement that produced this insn. */
6143int
6144insn_line (const_rtx insn)
6145{
5169661d 6146 return LOCATION_LINE (INSN_LOCATION (insn));
23a070f3 6147}
6148
6149/* Return source file of the statement that produced this insn. */
6150const char *
6151insn_file (const_rtx insn)
6152{
5169661d 6153 return LOCATION_FILE (INSN_LOCATION (insn));
23a070f3 6154}
30c3c442 6155
6156/* Return true if memory model MODEL requires a pre-operation (release-style)
6157 barrier or a post-operation (acquire-style) barrier. While not universal,
6158 this function matches the behavior of several targets. */
6159
6160bool
6161need_atomic_barrier_p (enum memmodel model, bool pre)
6162{
1a9fa1dd 6163 switch (model & MEMMODEL_MASK)
30c3c442 6164 {
6165 case MEMMODEL_RELAXED:
6166 case MEMMODEL_CONSUME:
6167 return false;
6168 case MEMMODEL_RELEASE:
6169 return pre;
6170 case MEMMODEL_ACQUIRE:
6171 return !pre;
6172 case MEMMODEL_ACQ_REL:
6173 case MEMMODEL_SEQ_CST:
6174 return true;
6175 default:
6176 gcc_unreachable ();
6177 }
6178}
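
/* Illustrative sketch, not part of the original file: a backend
   expanding an atomic operation would bracket it with fences as
   directed by the predicate above.  expand_mem_thread_fence is the
   real expander from optabs.c; the wrapper itself is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_wrap_atomic_op (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    expand_mem_thread_fence (model);

  /* The atomic operation itself would be emitted here.  */

  if (need_atomic_barrier_p (model, false))
    expand_mem_thread_fence (model);
}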
6179\f
1f3233d1 6180#include "gt-emit-rtl.h"