/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

34#include "config.h"
670ee920 35#include "system.h"
4977bab6 36#include "coretypes.h"
4d0cdd0c 37#include "memmodel.h"
c7131fb2 38#include "backend.h"
957060b5 39#include "target.h"
23b2ce53 40#include "rtl.h"
957060b5 41#include "tree.h"
c7131fb2 42#include "df.h"
957060b5
AM
43#include "tm_p.h"
44#include "stringpool.h"
957060b5
AM
45#include "insn-config.h"
46#include "regs.h"
47#include "emit-rtl.h"
48#include "recog.h"
c7131fb2 49#include "diagnostic-core.h"
40e23961 50#include "alias.h"
40e23961 51#include "fold-const.h"
d8a2d370 52#include "varasm.h"
60393bbc 53#include "cfgrtl.h"
60393bbc 54#include "tree-eh.h"
36566b39 55#include "explow.h"
23b2ce53 56#include "expr.h"
b5b8b0ac 57#include "params.h"
9b2b7279 58#include "builtins.h"
9021b8ec 59#include "rtl-iter.h"
1f9ceff1 60#include "stor-layout.h"
ecf835e9 61#include "opts.h"
5fa396ad 62#include "predict.h"
3877c560 63#include "rtx-vector-builder.h"
ca695ac9 64
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record the fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by
   try_split.  */
profile_probability split_branch_probability;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
   into the hash table if one identical to it is not already in the
   table.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

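/* Editor's note (illustrative, not part of the original file): because
   CONST_INTs are interned -- small values in const_int_rtx, larger ones
   in const_int_htab -- pointer equality is a valid identity test for
   them; e.g. GEN_INT (0) == const0_rtx once init_emit_once has run.  */
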
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}

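/* Editor's note (illustrative, not part of the original file): the
   trunc_int_for_mode call above wraps out-of-range values to MODE's
   precision, so gen_int_mode (0x1ff, QImode) sign-extends the low eight
   bits and returns the shared (const_int -1).  */
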
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of I1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

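/* Editor's note (illustrative, not part of the original file): case 2
   above collapses wide pairs whose value fits a single word; e.g.
   immed_double_const (5, 0, TImode) yields (const_int 5), since I1 is
   just the sign-copy of a nonnegative I0.  */
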
/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

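/* Editor's note (illustrative, not part of the original file): outside
   reload/LRA the code above shares the well-known hard registers, so
   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) returns the one global
   stack_pointer_rtx instead of allocating a fresh REG.  */
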
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

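/* Editor's note (an assumption, not from the original file): gen_const_mem
   is the natural constructor for references to read-only data such as
   constant-pool entries, since it marks the MEM both MEM_READONLY_P and
   MEM_NOTRAP_P.  */
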
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs when a register
		should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}

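/* Editor's illustration (not part of the original file): the early checks
   above already reject, e.g., (subreg:SI (reg:DI R) 2), whose offset is
   not a multiple of the 4-byte outer size, and (subreg:SI (reg:DI R) 8),
   whose offset lies entirely outside the 8-byte inner value.  */
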
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
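
/* Editor's note (illustrative, not part of the original file): for outer
   SImode and inner DImode this returns 0 on little-endian and 4 on
   big-endian targets; swapping the modes makes the pairing paradoxical,
   giving 0 and -4 respectively.  */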

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

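/* Editor's note (illustrative, not part of the original file): with
   generating_concat_p set, gen_reg_rtx (DCmode) returns
   (concat:DC (reg:DF) (reg:DF)) rather than one DCmode pseudo, per the
   complex-mode branch above.  */
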
/* Make sure m_regno_pointer_align and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current
   function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}

\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || const_vec_p (x)
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
ccba022b 1605rtx
ef4bddc2 1606gen_highpart (machine_mode mode, rtx x)
ccba022b 1607{
cf098191 1608 poly_uint64 msize = GET_MODE_SIZE (mode);
e0e08ac2 1609 rtx result;
ddef6bc7 1610
ccba022b
RS
1611 /* This case loses if X is a subreg. To catch bugs early,
1612 complain if an invalid MODE is used even in other cases. */
cf098191
RS
1613 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1614 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
ddef6bc7 1615
e0e08ac2
JH
1616 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1617 subreg_highpart_offset (mode, GET_MODE (x)));
5b0264cb 1618 gcc_assert (result);
b8698a0f 1619
09482e0d
JW
1620 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1621 the target if we have a MEM. gen_highpart must return a valid operand,
1622 emitting code if necessary to do so. */
5b0264cb
NS
1623 if (MEM_P (result))
1624 {
1625 result = validize_mem (result);
1626 gcc_assert (result);
1627 }
b8698a0f 1628
e0e08ac2
JH
1629 return result;
1630}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
                                        (inner_bytes - outer_bytes)
                                        * BITS_PER_UNIT);
}
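
/* A worked example, not part of the original source: for an SImode
   part of a DImode value, OUTER_BYTES == 4 and INNER_BYTES == 8.  On
   a little-endian target the lowpart offset is 0 and the highpart
   offset is 4; on a big-endian target the two values are swapped.  */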

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
                                          GET_MODE (SUBREG_REG (x))),
                   SUBREG_BYTE (x));
}
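
/* An illustrative sketch, not part of the original source: on a
   little-endian target this returns 1 for (subreg:SI (reg:DI 100) 0)
   and 0 for (subreg:SI (reg:DI 100) 4), since only the first has the
   SUBREG_BYTE of a lowpart.  The register number is made up.  */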
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
                 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
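
/* An illustrative sketch, not part of the original source: on a
   32-bit little-endian target, word 1 of a DImode pseudo is its high
   word, so

     rtx hi = operand_subword (di_reg, 1, 1, DImode);

   yields (subreg:SI (reg:DI ...) 4).  The variable di_reg is made up
   for the example.  */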

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          poly_uint64 suboffset;
          if (!byte_offset
              || !poly_int_tree_p (byte_offset, &suboffset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += suboffset;
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
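
/* A worked example, not part of the original source: for a MEM whose
   MEM_EXPR is a sufficiently aligned decl and whose accumulated byte
   offset within that decl works out to 6, a query with ALIGN == 32
   yields 6 % (32 / BITS_PER_UNIT) == 2, i.e. the address sits 2 bytes
   past a 32-bit boundary.  */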

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || (TREE_CODE (t2) == COMPONENT_REF
                  /* For trailing arrays t2 doesn't have a size that
                     covers all valid accesses.  */
                  && ! array_at_struct_end_p (t)))
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (poly_int_tree_p (off_tree, &attrs.offset))
                {
                  attrs.offset_known_p = true;
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
        obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
        attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
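
/* An illustrative sketch, not part of the original source: expansion
   code typically pairs a fresh MEM with the tree it was generated
   from, e.g.

     rtx mem = gen_rtx_MEM (DImode, addr);
     set_mem_attributes (mem, exp, 0);

   so that alias, alignment, size and MEM_EXPR information derived
   from EXP is available to later RTL passes.  The variables mem, addr
   and exp are made up for the example.  */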

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
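
/* A usage note, not part of the original source: each accessor above
   follows the same pattern -- copy the current attributes, change one
   field, and reinstall them -- so, e.g.,

     set_mem_align (mem, 64);

   records that MEM is known to be 64-bit aligned without disturbing
   its other attributes.  */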
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate the address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
                  int validate, int adjust_address, int adjust_object,
                  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
          || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
          && GET_CODE (addr) == LO_SUM
          && known_in_range_p (offset,
                               0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
                                   / BITS_PER_UNIT)))
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
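
/* An illustrative sketch, not part of the original source: callers
   normally go through the adjust_address/adjust_address_nv macros,
   e.g.

     rtx hi = adjust_address (mem, SImode, 4);

   which rereads MEM as SImode at byte offset 4 and folds the offset
   into the recorded MEM_OFFSET, MEM_SIZE and alignment.  */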

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
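
/* An illustrative sketch, not part of the original source: a typical
   use is to rewrite a MEM after its address has been copied into a
   register, e.g.

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);

   which keeps all memory attributes, since the same location is still
   being addressed.  */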

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (poly_int_tree_p (DECL_SIZE_UNIT (field))
              && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
              && known_ge (attrs.offset, 0))
            break;

          poly_uint64 suboffset;
          if (!poly_int_tree_p (offset, &suboffset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += suboffset;
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
               && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
                            size)
               && known_ge (attrs.offset, 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
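
/* An illustrative sketch, not part of the original source: a target
   that cannot issue QImode loads might widen a byte reference known
   to live 3 bytes past a word boundary to the containing word,

     rtx wide = widen_memory_access (mem, SImode, -3);

   and then extract the byte with shifts and masks.  */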
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
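
/* An illustrative sketch, not part of the original source: a fresh
   label is created here and later placed in the insn stream with
   emit_label, e.g.

     rtx_code_label *done = gen_label_rtx ();
     ...
     emit_label (done);
*/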
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
        SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}

/* Check that ORIG is not marked when it should not be, and mark ORIG
   as in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
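
/* A usage note, not part of the original source: copy_rtx_if_shared
   relies on the used-flags protocol above, so a caller must clear the
   flags before unsharing, e.g.

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   as unshare_all_rtl_again does for every insn in the chain.  */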
3207
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
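
/* A minimal usage sketch (assumed name, not part of the original
   source): the unsharing helpers above are normally used in pairs.
   All USED bits reachable from a pattern must be cleared before
   copy_rtx_if_shared walks it, otherwise every subexpression would
   appear shared.  */

static rtx
unshare_pattern_sketch (rtx pat)
{
  /* Clear stale USED bits left over from any earlier walk.  */
  reset_used_flags (pat);
  /* Now an rtx reached twice during this walk really is shared and
     gets replaced by a shallow copy.  */
  return copy_rtx_if_shared (pat);
}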
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
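
/* Illustrative sketch (assumed names, not from the original source):
   a typical caller copies an operand out of harm's way before
   emitting a store that might clobber it.  */

static void
emit_store_safely_sketch (rtx dest, rtx value)
{
  /* If writing DEST could alter VALUE, VALUE is first moved into a
     fresh pseudo and that pseudo is used instead.  */
  value = make_safe_from (value, dest);
  emit_move_insn (dest, value);
}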
\f
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

\f
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Return true if INSN really does something.  This routine does not
   look inside SEQUENCEs.  After reload this also excludes standalone
   USE and CLOBBER insns.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  */

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
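
/* Illustrative sketch (assumed name, not from the original source):
   counting the active insns of the current function with the
   accessors above.  */

static int
count_active_insns_sketch (void)
{
  int n = 0;
  rtx_insn *insn = get_insns ();

  /* get_insns returns the chain head, which may itself be active.  */
  if (insn && !active_insn_p (insn))
    insn = next_active_insn (insn);
  while (insn)
    {
      n++;
      insn = next_active_insn (insn);
    }
  return n;
}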
\f
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
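
/* Illustrative sketch (assumed name): the comment above states that
   the two cc0 accessors are mutual inverses, which can be spelled out
   as a consistency check for an insn SETTER known to set cc0.  */

static void
verify_cc0_pair_sketch (rtx_insn *setter)
{
  rtx_insn *user = next_cc0_user (setter);
  gcc_assert (user != NULL && prev_cc0_setter (user) == setter);
}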

/* Return true if X contains an RTX_AUTOINC class rtx whose address
   register matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability.initialized_p ()
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_br_prob_note (insn, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
        case REG_CALL_NOCF_CHECK:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          if (!AUTO_INC_DEC)
            break;

          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, get_args_size (note));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
         ? (after ? PREV_INSN (after) : get_last_insn ())
         : NEXT_INSN (before);
}
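
/* Illustrative sketch (assumed name): the typical driver loop around
   try_split, walking the whole chain and resuming after the last
   replacement insn, roughly what a split pass does.  */

static void
split_all_insns_sketch (void)
{
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      /* With LAST nonzero, try_split returns the final replacement
         insn (or INSN itself if nothing was split), so the walk
         continues after the whole expansion.  */
      insn = try_split (PATTERN (insn), insn, 1);
}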
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slot.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
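
/* Illustrative sketch (assumed name): for a bare pattern, the emit_*
   entry points below reduce to wrapping it with one of the make_*_raw
   constructors above and linking the result into the chain.  */

static rtx_insn *
emit_bare_pattern_sketch (rtx pattern)
{
  rtx_insn *insn = make_insn_raw (pattern);
  add_insn (insn);
  return insn;
}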
\f
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }

      gcc_assert (seq);
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
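
/* Illustrative sketch (assumed name) of the reuse pattern described
   above remove_insn: detach an insn and re-emit it after AFTER.  The
   chain pointers must be nullified by the caller before reinsertion.  */

static void
move_insn_after_sketch (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}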

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
        gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                           make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set its BB.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
    /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
      return true;

    /* Notes for var tracking and EH region markers can appear between or
       inside basic blocks.  If the caller is emitting on the basic block
       boundary, do not set BLOCK_FOR_INSN on the new note.  */
    case NOTE_INSN_VAR_LOCATION:
    case NOTE_INSN_CALL_ARG_LOCATION:
    case NOTE_INSN_EH_REGION_BEG:
    case NOTE_INSN_EH_REGION_END:
      return on_bb_boundary_p;

    /* Otherwise, BLOCK_FOR_INSN must be set.  */
    default:
      return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
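
/* Illustrative sketch (assumed names): bracketing a single insn with
   an EH region marker pair using the note emitters above;
   REGION_NUMBER is a hypothetical caller-supplied value.  */

static void
wrap_in_eh_region_sketch (rtx_insn *insn, int region_number)
{
  rtx_note *beg = emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
  rtx_note *end = emit_note_after (NOTE_INSN_EH_REGION_END, insn);
  NOTE_EH_HANDLER (beg) = region_number;
  NOTE_EH_HANDLER (end) = region_number;
}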
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
          && !JUMP_TABLE_DATA_P (after) /* FIXME */
          && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
                    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}

/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
                            rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
                                              insnp ? before : NULL_RTX,
                                              NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
          && !JUMP_TABLE_DATA_P (first) /* FIXME */
          && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
                     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_before_setloc (pattern, before, loc, false,
                                    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_before (pattern, before, true, false,
                             make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
0d682900 5056\f
2f937369
DM
5057/* Take X and emit it at the end of the doubly-linked
5058 INSN list.
23b2ce53
RS
5059
5060 Returns the last insn emitted. */
5061
cd459bf8 5062rtx_insn *
502b8322 5063emit_insn (rtx x)
23b2ce53 5064{
cd459bf8
DM
5065 rtx_insn *last = get_last_insn ();
5066 rtx_insn *insn;
23b2ce53 5067
2f937369
DM
5068 if (x == NULL_RTX)
5069 return last;
23b2ce53 5070
2f937369
DM
5071 switch (GET_CODE (x))
5072 {
b5b8b0ac 5073 case DEBUG_INSN:
2f937369
DM
5074 case INSN:
5075 case JUMP_INSN:
5076 case CALL_INSN:
5077 case CODE_LABEL:
5078 case BARRIER:
5079 case NOTE:
cd459bf8 5080 insn = as_a <rtx_insn *> (x);
2f937369 5081 while (insn)
23b2ce53 5082 {
cd459bf8 5083 rtx_insn *next = NEXT_INSN (insn);
23b2ce53 5084 add_insn (insn);
2f937369
DM
5085 last = insn;
5086 insn = next;
23b2ce53 5087 }
2f937369 5088 break;
23b2ce53 5089
2f937369 5090#ifdef ENABLE_RTL_CHECKING
39718607 5091 case JUMP_TABLE_DATA:
2f937369 5092 case SEQUENCE:
5b0264cb 5093 gcc_unreachable ();
2f937369
DM
5094 break;
5095#endif
23b2ce53 5096
2f937369
DM
5097 default:
5098 last = make_insn_raw (x);
5099 add_insn (last);
5100 break;
23b2ce53
RS
5101 }
5102
5103 return last;
5104}
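
/* Illustrative sketch (added for exposition; not part of the original
   source): a typical caller builds a pattern and hands it to emit_insn,
   which wraps it in an INSN and appends it to the current chain.  The
   pseudo and constant here are arbitrary.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_emit_set_const (void)
{
  /* Allocate a fresh SImode pseudo register.  */
  rtx reg = gen_reg_rtx (SImode);
  /* Emit (set (reg) (const_int 42)) at the end of the current chain.  */
  return emit_insn (gen_rtx_SET (reg, GEN_INT (42)));
}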

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
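
/* Illustrative sketch (added for exposition; not part of the original
   source): after emitting an expansion whose net effect is to load the
   constant 42, a pass can record that fact for later optimizers.  The
   insn argument is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_attach_reg_equal (rtx_insn *insn)
{
  /* Attach (or replace) a REG_EQUAL note saying that the destination of
     INSN's single SET evaluates to 42; this is a no-op, returning
     NULL_RTX, if INSN has no suitable SET.  */
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}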
\f
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
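
/* Illustrative sketch (added for exposition; not part of the original
   source): because emit dispatches on classify_insn, a single call
   site can add patterns of several shapes.  Here a USE is routed to
   emit_insn; the true argument would also allow a barrier after an
   unconditional jump pattern.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_emit_use (rtx reg)
{
  return emit (gen_rtx_USE (VOIDmode, reg), true);
}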
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
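
/* Illustrative sketch (added for exposition; not part of the original
   source): the canonical way to build a detached insn chain, mirroring
   gen_clobber and gen_use above.  get_insns must be called before
   end_sequence, which pops back to the enclosing chain.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_move_as_sequence (rtx dest, rtx src)
{
  rtx_insn *seq;

  start_sequence ();		/* Push a fresh, empty insn chain.  */
  emit_move_insn (dest, src);	/* Emit into that chain.  */
  seq = get_insns ();		/* Grab the chain before popping.  */
  end_sequence ();		/* Restore the previous chain.  */
  return seq;
}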
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
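
/* Illustrative sketch (added for exposition; not part of the original
   source): RTL may not be shared between insns, so re-emitting an
   existing insn elsewhere goes through copy_insn first, exactly as
   emit_copy_of_insn_after does further below.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_reemit_after (rtx_insn *insn, rtx_insn *after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}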

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements.  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
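
/* Worked example (added for exposition; not part of the original
   source): for a single-pattern stepped vector whose encoded elements
   are {0, 1, 2}, element i = 5 gives npatterns = 1, count = 5,
   final_i = 2, v1 = 1 and v2 = 2, so diff = 1 and the result is
   v2 + (count - 2) * diff = 2 + 3 * 1 = 5, the expected fifth term of
   the series 0, 1, 2, 3, ...  */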

/* Return the value of element I of CONST_VECTOR X.  */

rtx
const_vector_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  if (i < (unsigned int) XVECLEN (x, 0))
    return CONST_VECTOR_ENCODED_ELT (x, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int encoded_nelts = const_vector_encoded_nelts (x);
      unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return CONST_VECTOR_ENCODED_ELT (x, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return immed_wide_int_const (const_vector_int_elt (x, i),
			       GET_MODE_INNER (GET_MODE (x)));
}

/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}

/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
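
/* Illustrative sketch (added for exposition; not part of the original
   source): splatting the constant 7 across every lane of an integer
   vector mode supplied by the caller.  */

static rtx ATTRIBUTE_UNUSED
example_splat_seven (machine_mode mode)
{
  /* For a 4-element integer vector mode this yields the CONST_VECTOR
     {7, 7, 7, 7}, encoded as a single duplicated pattern.  */
  return gen_const_vec_duplicate (mode, GEN_INT (7));
}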

/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}

/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
	      && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
					     builder[i - 1], step));
  return builder.build ();
}

/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}
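
/* Illustrative sketch (added for exposition; not part of the original
   source): an index vector {0, 1, 2, ...} is the series with base 0
   and step 1; only the three leading elements are encoded and the rest
   are implied by the step.  */

static rtx ATTRIBUTE_UNUSED
example_index_vector (machine_mode mode)
{
  return gen_const_vec_series (mode, const0_rtx, const1_rtx);
}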

/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
    {
      scalar_mode smode = smode_iter.require ();
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
      const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

location_t prologue_location;
location_t epilogue_location;

/* Hold the current and last location information, so the data
   structures are built lazily only when instructions at a given place
   are needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
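
/* Illustrative sketch (added for exposition; not part of the original
   source): how a target expander might consult need_atomic_barrier_p
   around an atomic operation.  The fence-emission steps are left as
   comments because they are target-specific.  */

static void ATTRIBUTE_UNUSED
example_wrap_atomic (enum memmodel model)
{
  if (need_atomic_barrier_p (model, /*pre=*/true))
    ; /* Emit the target's release-style fence here.  */
  /* ... emit the atomic operation itself ...  */
  if (need_atomic_barrier_p (model, /*pre=*/false))
    ; /* Emit the target's acquire-style fence here.  */
}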

/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
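
/* Illustrative sketch (added for exposition; not part of the original
   source): requesting a shift-by-3 constant.  The mode of the shifted
   value is currently unused but is passed so that the representation
   of shift amounts can change without touching callers.  */

static rtx ATTRIBUTE_UNUSED
example_shift_amount_3 (machine_mode mode)
{
  return gen_int_shift_amount (mode, 3);
}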

/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}

\f
#include "gt-emit-rtl.h"