/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;  /* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;  /* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;   /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.  */
profile_probability split_branch_probability;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...)  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

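/* Illustrative sketch (not part of the original source): because
   CONST_INTs are shared, pointer equality is also value equality.
   Assuming the usual GEN_INT macro, something like

     rtx a = GEN_INT (42);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 42);
     gcc_checking_assert (a == b);

   holds for any argument, whether the value comes from the small
   const_int_rtx cache or from const_int_htab.  */
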
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}

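/* Illustrative sketch (not part of the original source): unlike raw
   GEN_INT, gen_int_mode first truncates to MODE, so the result is
   canonical for that mode.  For example, with 8-bit QImode,

     gen_int_mode (255, QImode)

   yields (const_int -1), the sign-extended canonical form, whereas
   GEN_INT (255) would produce a CONST_INT that is not canonical for
   QImode.  */
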
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

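/* Illustrative sketch (not part of the original source): the lookup
   functions above make CONST_DOUBLEs unique, so building the same
   floating-point constant twice yields one shared rtx, e.g.

     rtx a = const_double_from_real_value (dconst1, DFmode);
     rtx b = const_double_from_real_value (dconst1, DFmode);
     gcc_checking_assert (a == b);

   where dconst1 is the REAL_VALUE_TYPE for 1.0 declared earlier in
   this file.  */
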
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

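/* Illustrative sketch (not part of the original source): the MEM
   generators above differ only in the attributes they preset.  Given
   an address rtx ADDR, the calls

     gen_rtx_MEM (SImode, addr)    -- plain MEM, attributes cleared
     gen_const_mem (SImode, addr)  -- also MEM_READONLY_P and MEM_NOTRAP_P
     gen_frame_mem (SImode, addr)  -- also MEM_NOTRAP_P plus the frame
				      alias set

   all produce an (mem:SI ...) over the same address.  */
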
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}

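/* Illustrative sketch (not part of the original source): on a
   little-endian target with 4-byte SImode and 8-byte DImode,

     validate_subreg (SImode, DImode, reg, 0)   - true, low half
     validate_subreg (SImode, DImode, reg, 4)   - true, high half
     validate_subreg (SImode, DImode, reg, 2)   - false, misaligned
     validate_subreg (SImode, DFmode, reg, 0)   - false, FP size change

   matching the alignment and float-mode rules checked above.  */
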
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
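/* Illustrative sketch (not part of the original source): both entry
   points above produce an rtvec with the same contents; for two
   existing rtxes X and Y,

     rtvec v1 = gen_rtvec (2, x, y);

   is equivalent to

     rtx pair[] = { x, y };
     rtvec v2 = gen_rtvec_v (2, pair);

   and either result can be wrapped in, say, gen_rtx_PARALLEL.  */
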
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
\f
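/* Illustrative sketch (not part of the original source), assuming a
   target where BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN agree: for a
   4-byte SImode lowpart of an 8-byte DImode value,

     byte_lowpart_offset (SImode, DImode)

   is 0 on little-endian and 4 on big-endian targets, while the
   paradoxical direction byte_lowpart_offset (DImode, SImode) is 0 and
   -4 respectively.  */
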
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

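/* Illustrative sketch (not part of the original source): for complex
   modes the function above returns a CONCAT rather than a single REG,
   so during expansion (when generating_concat_p is set)

     gen_reg_rtx (DCmode)

   yields (concat:DC (reg:DF ...) (reg:DF ...)) built from two fresh
   DFmode pseudos, while gen_reg_rtx (DImode) yields a single
   (reg:DI ...).  */
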
/* Make sure m_regno_pointer_align and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
1474
1475/* Return 1 plus largest pseudo reg number used in the current function. */
1476
1477int
502b8322 1478max_reg_num (void)
23b2ce53
RS
1479{
1480 return reg_rtx_no;
1481}
1482
1483/* Return 1 + the largest label number used so far in the current function. */
1484
1485int
502b8322 1486max_label_num (void)
23b2ce53 1487{
23b2ce53
RS
1488 return label_num;
1489}
1490
1491/* Return first label number used in this function (if any were used). */
1492
1493int
502b8322 1494get_first_label_num (void)
23b2ce53
RS
1495{
1496 return first_label_num;
1497}
6de9cd9a
DN
1498
1499/* If the rtx for label was created during the expansion of a nested
1500 function, then first_label_num won't include this label number.
fa10beec 1501 Fix this now so that array indices work later. */
6de9cd9a
DN
1502
1503void
9aa50db7 1504maybe_set_first_label_num (rtx_code_label *x)
6de9cd9a
DN
1505{
1506 if (CODE_LABEL_NUMBER (x) < first_label_num)
1507 first_label_num = CODE_LABEL_NUMBER (x);
1508}
51b86113
DM
1509
1510/* For use by the RTL function loader, when mingling with normal
1511 functions.
1512 Ensure that label_num is greater than the label num of X, to avoid
1513 duplicate labels in the generated assembler. */
1514
1515void
1516maybe_set_max_label_num (rtx_code_label *x)
1517{
1518 if (CODE_LABEL_NUMBER (x) >= label_num)
1519 label_num = CODE_LABEL_NUMBER (x) + 1;
1520}
1521
23b2ce53
RS
1522\f
1523/* Return a value representing some low-order bits of X, where the number
1524 of low-order bits is given by MODE. Note that no conversion is done
750c9258 1525 between floating-point and fixed-point values, rather, the bit
23b2ce53
RS
1526 representation is returned.
1527
1528 This function handles the cases in common between gen_lowpart, below,
1529 and two variants in cse.c and combine.c. These are the cases that can
1530 be safely handled at all points in the compilation.
1531
1532 If this is not a case we can handle, return 0. */
1533
1534rtx
ef4bddc2 1535gen_lowpart_common (machine_mode mode, rtx x)
23b2ce53 1536{
fad2288b 1537 poly_uint64 msize = GET_MODE_SIZE (mode);
ef4bddc2 1538 machine_mode innermode;
550d1387
GK
1539
1540 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1541 so we have to make one up. Yuk. */
1542 innermode = GET_MODE (x);
481683e1 1543 if (CONST_INT_P (x)
fad2288b
RS
1544 && known_le (msize * BITS_PER_UNIT,
1545 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
f4b31647 1546 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
550d1387 1547 else if (innermode == VOIDmode)
f4b31647 1548 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
b8698a0f 1549
5b0264cb 1550 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
23b2ce53 1551
550d1387 1552 if (innermode == mode)
23b2ce53
RS
1553 return x;
1554
fad2288b
RS
1555 /* The size of the outer and inner modes must be ordered. */
1556 poly_uint64 xsize = GET_MODE_SIZE (innermode);
1557 if (!ordered_p (msize, xsize))
1558 return 0;
1559
1eae67f8
RS
1560 if (SCALAR_FLOAT_MODE_P (mode))
1561 {
1562 /* Don't allow paradoxical FLOAT_MODE subregs. */
fad2288b 1563 if (maybe_gt (msize, xsize))
1eae67f8
RS
1564 return 0;
1565 }
1566 else
1567 {
1568 /* MODE must occupy no more of the underlying registers than X. */
fad2288b
RS
1569 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1570 unsigned int mregs, xregs;
1571 if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1572 || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1573 || mregs > xregs)
1eae67f8
RS
1574 return 0;
1575 }
53501a19 1576
54651377 1577 scalar_int_mode int_mode, int_innermode, from_mode;
23b2ce53 1578 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
54651377
RS
1579 && is_a <scalar_int_mode> (mode, &int_mode)
1580 && is_a <scalar_int_mode> (innermode, &int_innermode)
1581 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
23b2ce53
RS
1582 {
1583 /* If we are getting the low-order part of something that has been
1584 sign- or zero-extended, we can either just use the object being
1585 extended or make a narrower extension. If we want an even smaller
1586 piece than the size of the object being extended, call ourselves
1587 recursively.
1588
1589 This case is used mostly by combine and cse. */
1590
54651377 1591 if (from_mode == int_mode)
23b2ce53 1592 return XEXP (x, 0);
54651377
RS
1593 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1594 return gen_lowpart_common (int_mode, XEXP (x, 0));
1595 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1596 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
23b2ce53 1597 }
f8cfc6aa 1598 else if (GET_CODE (x) == SUBREG || REG_P (x)
2072a319 1599 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
0c12fc9b
RS
1600 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1601 || CONST_POLY_INT_P (x))
3403a1a9 1602 return lowpart_subreg (mode, x, innermode);
8aada4ad 1603
23b2ce53
RS
1604 /* Otherwise, we can't do this. */
1605 return 0;
1606}
1607\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
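
/* Illustrative sketch (added for exposition; not part of the original
   file): extracting the high part of a modeless integer constant.
   Because a CONST_INT carries no mode, gen_highpart_mode must be told
   the inner mode explicitly.  */

static rtx ATTRIBUTE_UNUSED
example_highpart_of_const (void)
{
  rtx c = GEN_INT (0x12345678);

  /* Returns the most significant SImode half of the DImode
     interpretation of C (here zero).  */
  return gen_highpart_mode (SImode, DImode, c);
}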

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
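
/* Worked example (added for exposition): with OUTER_BYTES == 4 and
   INNER_BYTES == 16, the lowpart offset is 0 and the highpart offset
   is 12 when both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are false;
   on a fully big-endian target the two values swap.  */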

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
					  GET_MODE (SUBREG_REG (x))),
		   SUBREG_BYTE (x));
}
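
/* Example (added for exposition): on a little-endian target,
   (subreg:SI (reg:DI 100) 0) is a lowpart, so subreg_lowpart_p
   returns 1, while (subreg:SI (reg:DI 100) 4) names the high word
   and the function returns 0.  On a big-endian target the answers
   are reversed.  */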
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
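
/* Illustrative sketch (added for exposition; not part of the original
   file): splitting a double-word value into word-sized halves, assuming
   a target with 32-bit words so that a DImode value spans two words.  */

static void ATTRIBUTE_UNUSED
example_split_doubleword (rtx di_reg)
{
  /* Word 0 is the low-order word unless WORDS_BIG_ENDIAN.  */
  rtx lo = operand_subword (di_reg, 0, 1, DImode);
  rtx hi = operand_subword (di_reg, 1, 1, DImode);
  gcc_assert (lo != NULL_RTX && hi != NULL_RTX);
}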

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Return 1 if the MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  poly_uint64 suboffset;
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
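
/* Example (added for exposition): if MEM's address is known to be a
   16-byte-aligned base plus 12 -- say the rtx for &s.f where S is
   16-byte aligned and field F sits at byte offset 12 -- then
   get_mem_align_offset (mem, 128) returns 12.  A return value of -1
   means no such relationship could be established.  */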

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL and we
     can see that here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}
RH
2045 /* If this expression uses it's parent's alias set, mark it such
2046 that we won't change it. */
b4ada065 2047 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
2048 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2049
8ac61af7
RK
2050 /* If this is a decl, set the attributes of the MEM from it. */
2051 if (DECL_P (t))
2052 {
f12144dd 2053 attrs.expr = t;
754c3d5d
RS
2054 attrs.offset_known_p = true;
2055 attrs.offset = 0;
6f1087be 2056 apply_bitpos = bitpos;
a787ccc3 2057 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
2058 }
2059
30b0317c 2060 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 2061 else if (CONSTANT_CLASS_P (t))
30b0317c 2062 ;
998d7deb 2063
a787ccc3
RS
2064 /* If this is a field reference, record it. */
2065 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 2066 {
f12144dd 2067 attrs.expr = t;
754c3d5d
RS
2068 attrs.offset_known_p = true;
2069 attrs.offset = 0;
6f1087be 2070 apply_bitpos = bitpos;
a787ccc3
RS
2071 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2072 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
2073 }
2074
2075 /* If this is an array reference, look for an outer field reference. */
2076 else if (TREE_CODE (t) == ARRAY_REF)
2077 {
2078 tree off_tree = size_zero_node;
1b1838b6
JW
2079 /* We can't modify t, because we use it at the end of the
2080 function. */
2081 tree t2 = t;
998d7deb
RH
2082
2083 do
2084 {
1b1838b6 2085 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
2086 tree low_bound = array_ref_low_bound (t2);
2087 tree unit_size = array_ref_element_size (t2);
2567406a
JH
2088
2089 /* We assume all arrays have sizes that are a multiple of a byte.
2090 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
2091 index, then convert to sizetype and multiply by the size of
2092 the array element. */
2093 if (! integer_zerop (low_bound))
4845b383
KH
2094 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2095 index, low_bound);
2567406a 2096
44de5aeb 2097 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
2098 size_binop (MULT_EXPR,
2099 fold_convert (sizetype,
2100 index),
44de5aeb
RK
2101 unit_size),
2102 off_tree);
1b1838b6 2103 t2 = TREE_OPERAND (t2, 0);
998d7deb 2104 }
1b1838b6 2105 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 2106
30b0317c 2107 if (DECL_P (t2)
12ead254
RB
2108 || (TREE_CODE (t2) == COMPONENT_REF
2109 /* For trailing arrays t2 doesn't have a size that
2110 covers all valid accesses. */
c3e46927 2111 && ! array_at_struct_end_p (t)))
998d7deb 2112 {
f12144dd 2113 attrs.expr = t2;
754c3d5d 2114 attrs.offset_known_p = false;
d05d7551 2115 if (poly_int_tree_p (off_tree, &attrs.offset))
6f1087be 2116 {
754c3d5d 2117 attrs.offset_known_p = true;
6f1087be
RH
2118 apply_bitpos = bitpos;
2119 }
998d7deb 2120 }
30b0317c 2121 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
2122 }
2123
56c47f22 2124 /* If this is an indirect reference, record it. */
70f34814 2125 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 2126 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 2127 {
f12144dd 2128 attrs.expr = t;
754c3d5d
RS
2129 attrs.offset_known_p = true;
2130 attrs.offset = 0;
56c47f22
RG
2131 apply_bitpos = bitpos;
2132 }
2133
fa70c221
RB
2134 /* If this is a reference based on a partitioned decl replace the
2135 base with a MEM_REF of the pointer representative we created
2136 during stack slot partitioning. */
2137 if (attrs.expr
2138 && VAR_P (base)
2139 && ! is_global_var (base)
2140 && cfun->gimple_df->decls_to_pointers != NULL)
2141 {
2142 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2143 if (namep)
2144 {
2145 attrs.expr = unshare_expr (attrs.expr);
2146 tree *orig_base = &attrs.expr;
2147 while (handled_component_p (*orig_base))
2148 orig_base = &TREE_OPERAND (*orig_base, 0);
2149 tree aptrt = reference_alias_ptr_type (*orig_base);
2150 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2151 build_int_cst (aptrt, 0));
2152 }
2153 }
2154
30b0317c
RB
2155 /* Compute the alignment. */
2156 unsigned int obj_align;
2157 unsigned HOST_WIDE_INT obj_bitpos;
2158 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
d05d7551
RS
2159 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2160 if (diff_align != 0)
2161 obj_align = MIN (obj_align, diff_align);
30b0317c 2162 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
2163 }
2164
d05d7551
RS
2165 poly_uint64 const_size;
2166 if (poly_int_tree_p (new_size, &const_size))
a787ccc3
RS
2167 {
2168 attrs.size_known_p = true;
d05d7551 2169 attrs.size = const_size;
a787ccc3
RS
2170 }
2171
15c812e3 2172 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
2173 bit position offset. Similarly, increase the size of the accessed
2174 object to contain the negative offset. */
d05d7551 2175 if (maybe_ne (apply_bitpos, 0))
8c317c5f 2176 {
754c3d5d 2177 gcc_assert (attrs.offset_known_p);
d05d7551
RS
2178 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2179 attrs.offset -= bytepos;
754c3d5d 2180 if (attrs.size_known_p)
d05d7551 2181 attrs.size += bytepos;
8c317c5f 2182 }
6f1087be 2183
8ac61af7 2184 /* Now set the attributes we computed above. */
f18a7b25 2185 attrs.addrspace = as;
f12144dd 2186 set_mem_attrs (ref, &attrs);
173b24b9
RK
2187}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
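
/* Illustrative sketch (added for exposition; not part of the original
   file): stamping attributes onto a freshly built MEM with the setters
   above.  */

static rtx ATTRIBUTE_UNUSED
example_build_attributed_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (SImode, addr);
  set_mem_align (mem, 32);		/* Known 32-bit alignment.  */
  set_mem_size (mem, 4);		/* Exactly four bytes accessed.  */
  set_mem_alias_set (mem, new_alias_set ());
  return mem;
}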
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
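
/* Illustrative sketch (added for exposition; not part of the original
   file): forming a reference to the second SImode word of a DImode
   memory operand.  The trailing arguments mirror what the
   adjust_address convenience macro is assumed to pass: validate the
   address, adjust it, and keep the underlying object.  */

static rtx ATTRIBUTE_UNUSED
example_second_word (rtx di_mem)
{
  return adjust_address_1 (di_mem, SImode, 4, 1, 1, 0, 0);
}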

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
			     poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			    size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
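
/* Illustrative sketch (added for exposition; not part of the original
   file): widening a QImode reference to SImode, as a target without
   byte loads might do before masking out the byte of interest.  */

static rtx ATTRIBUTE_UNUSED
example_widen_byte (rtx byte_mem)
{
  /* Same starting address, but an SImode access; the MEM_EXPR is
     stripped back or dropped if the wider access would overrun the
     underlying object.  */
  return widen_memory_access (byte_mem, SImode, 0);
}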
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
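
/* Illustrative sketch (added for exposition; not part of the original
   file): the usual pairing of gen_label_rtx with emit_label when
   expanding a forward branch.  */

static void ATTRIBUTE_UNUSED
example_emit_label (void)
{
  rtx_code_label *label = gen_label_rtx ();
  /* ... emit a conditional jump to LABEL here ...  */
  emit_label (label);
}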
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}

/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
    case CLOBBER_HIGH:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside a single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      {
		rtx insn = XVECEXP (pat, 0, i);
		if (INSN_P (insn))
		  reset_insn_used_flags (insn);
	      }
	  }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx insn = XVECEXP (pat, 0, i);
	      if (INSN_P (insn))
		verify_insn_sharing (insn);
	    }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
    case CLOBBER_HIGH:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
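
/* Note on the intended discipline (added for exposition): the USED
   bits must be cleared first, e.g. by reset_used_flags or
   reset_all_used_flags; copy_rtx_if_shared then replaces any
   subexpression reached a second time with a fresh copy, which is
   exactly how unshare_all_rtl_in_chain above unshares each insn in
   turn.  */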

/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
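
/* A typical use of this pair, sketched: a pass that has just built a
   pattern PAT and wants to be sure it contains no invalid sharing
   before emitting it can do

	reset_used_flags (pat);
	pat = copy_rtx_if_shared (pat);
	emit_insn (pat);

   where copy_rtx_if_shared is the public entry point (declared in
   rtl.h) wrapping copy_rtx_if_shared_1 above.  */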
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
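
/* For example, when expanding OTHER = OP (X, ...) where the expansion
   may store into OTHER before it has finished reading X, the caller
   can protect X first (a sketch):

	x = make_safe_from (x, other);

   afterwards X is either unchanged or a fresh pseudo holding the old
   value, so stores into OTHER can no longer alter it.  */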
\f
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
	}
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn))
	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	      insn = seq->insn (seq->len () - 1);
	}
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

\f
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	  insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
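
/* A pass that wants to visit every insn that will produce code,
   skipping notes, barriers and (after reload) standalone USE/CLOBBER
   markers, can thus be sketched as

	rtx_insn *insn;
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (active_insn_p (insn))
	    ... process INSN ...

   or equivalently walk forward with next_active_insn from the first
   active insn onward.  */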
\f
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

/* Return true iff X contains an RTX_AUTOINC class rtx whose operand
   is REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}

/* Increment the label uses for all labels present in X.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability.initialized_p ()
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_br_prob_note (insn, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	case REG_CALL_NOCF_CHECK:
	case REG_CALL_ARG_LOCATION:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	case REG_LABEL_TARGET:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns, increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
	 ? (after ? PREV_INSN (after) : get_last_insn ())
	 : NEXT_INSN (before);
}
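
/* A typical caller wanting to split INSN in place and resume scanning
   after whatever it became might do, roughly,

	rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   LAST is the last replacement insn, or INSN itself if no splitter
   matched, so the scan can simply continue at NEXT_INSN (last).  */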
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
{
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
{
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx_insn *insn)
{
  if (INSN_P (insn))
    df_insn_delete (insn);
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for merely disconnecting an insn from the chain so that it can
   be re-emitted elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
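
/* The disconnect-and-reinsert idiom this enables can be sketched as

	remove_insn (insn);
	SET_PREV_INSN (insn) = NULL;
	SET_NEXT_INSN (insn) = NULL;
	add_insn_after (insn, new_place, NULL);

   where new_place stands for wherever the caller wants INSN to live
   next; the two SET_*_INSN stores perform the nullification required
   above.  */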
4404
ee960939
OH
4405/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4406
4407void
502b8322 4408add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 4409{
5b0264cb 4410 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
4411
4412 /* Put the register usage information on the CALL. If there is already
4413 some usage information, put ours at the end. */
4414 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4415 {
4416 rtx link;
4417
4418 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4419 link = XEXP (link, 1))
4420 ;
4421
4422 XEXP (link, 1) = call_fusage;
4423 }
4424 else
4425 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4426}
4427
23b2ce53
RS
4428/* Delete all insns made since FROM.
4429 FROM becomes the new last instruction. */
4430
4431void
fee3e72c 4432delete_insns_since (rtx_insn *from)
23b2ce53
RS
4433{
4434 if (from == 0)
5936d944 4435 set_first_insn (0);
23b2ce53 4436 else
0f82e5c9 4437 SET_NEXT_INSN (from) = 0;
5936d944 4438 set_last_insn (from);
23b2ce53
RS
4439}
4440
5dab5552
MS
4441/* This function is deprecated, please use sequences instead.
4442
4443 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4444 The insns to be moved are those between FROM and TO.
4445 They are moved to a new position after the insn AFTER.
4446 AFTER must not be FROM or TO or any insn in between.
4447
4448 This function does not know about SEQUENCEs and hence should not be
4449 called after delay-slot filling has been done. */
4450
4451void
fee3e72c 4452reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
23b2ce53 4453{
b2b29377
MM
4454 if (flag_checking)
4455 {
4456 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4457 gcc_assert (after != x);
4458 gcc_assert (after != to);
4459 }
4f8344eb 4460
23b2ce53
RS
4461 /* Splice this bunch out of where it is now. */
4462 if (PREV_INSN (from))
0f82e5c9 4463 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
23b2ce53 4464 if (NEXT_INSN (to))
0f82e5c9 4465 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4466 if (get_last_insn () == to)
4467 set_last_insn (PREV_INSN (from));
4468 if (get_insns () == from)
4469 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4470
4471 /* Make the new neighbors point to it and it to them. */
4472 if (NEXT_INSN (after))
0f82e5c9 4473 SET_PREV_INSN (NEXT_INSN (after)) = to;
23b2ce53 4474
0f82e5c9
DM
4475 SET_NEXT_INSN (to) = NEXT_INSN (after);
4476 SET_PREV_INSN (from) = after;
4477 SET_NEXT_INSN (after) = from;
c3284718 4478 if (after == get_last_insn ())
5936d944 4479 set_last_insn (to);
23b2ce53
RS
4480}
4481
3c030e88
JH
4482/* Same as function above, but take care to update BB boundaries. */
4483void
ac9d2d2c 4484reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
3c030e88 4485{
ac9d2d2c 4486 rtx_insn *prev = PREV_INSN (from);
3c030e88
JH
4487 basic_block bb, bb2;
4488
4489 reorder_insns_nobb (from, to, after);
4490
4b4bf941 4491 if (!BARRIER_P (after)
3c030e88
JH
4492 && (bb = BLOCK_FOR_INSN (after)))
4493 {
b2908ba6 4494 rtx_insn *x;
6fb5fa3c 4495 df_set_bb_dirty (bb);
68252e27 4496
4b4bf941 4497 if (!BARRIER_P (from)
3c030e88
JH
4498 && (bb2 = BLOCK_FOR_INSN (from)))
4499 {
a813c111 4500 if (BB_END (bb2) == to)
1130d5e3 4501 BB_END (bb2) = prev;
6fb5fa3c 4502 df_set_bb_dirty (bb2);
3c030e88
JH
4503 }
4504
a813c111 4505 if (BB_END (bb) == after)
1130d5e3 4506 BB_END (bb) = to;
3c030e88
JH
4507
4508 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4509 if (!BARRIER_P (x))
63642d5a 4510 df_insn_change_bb (x, bb);
3c030e88
JH
4511 }
4512}
4513
23b2ce53 4514\f
2f937369
DM
4515/* Emit insn(s) of given code and pattern
4516 at a specified place within the doubly-linked list.
23b2ce53 4517
2f937369
DM
4518 All of the emit_foo global entry points accept an object
4519 X which is either an insn list or a PATTERN of a single
4520 instruction.
23b2ce53 4521
2f937369
DM
4522 There are thus a few canonical ways to generate code and
4523 emit it at a specific place in the instruction stream. For
4524 example, consider the instruction named SPOT and the fact that
4525 we would like to emit some instructions before SPOT. We might
4526 do it like this:
23b2ce53 4527
2f937369
DM
4528 start_sequence ();
4529 ... emit the new instructions ...
4530 insns_head = get_insns ();
4531 end_sequence ();
23b2ce53 4532
2f937369 4533 emit_insn_before (insns_head, SPOT);
23b2ce53 4534
2f937369
DM
4535 It used to be common to generate SEQUENCE rtl instead, but that
4536 is a relic of the past which no longer occurs. The reason is that
4537 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4538 generated would almost certainly die right after it was created. */
23b2ce53 4539
cd459bf8 4540static rtx_insn *
4dea3bff
DM
4541emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4542 basic_block bb,
167b9fae 4543 rtx_insn *(*make_raw) (rtx))
23b2ce53 4544{
167b9fae 4545 rtx_insn *insn;
23b2ce53 4546
5b0264cb 4547 gcc_assert (before);
2f937369
DM
4548
4549 if (x == NULL_RTX)
4dea3bff 4550 return last;
2f937369
DM
4551
4552 switch (GET_CODE (x))
23b2ce53 4553 {
b5b8b0ac 4554 case DEBUG_INSN:
2f937369
DM
4555 case INSN:
4556 case JUMP_INSN:
4557 case CALL_INSN:
4558 case CODE_LABEL:
4559 case BARRIER:
4560 case NOTE:
167b9fae 4561 insn = as_a <rtx_insn *> (x);
2f937369
DM
4562 while (insn)
4563 {
167b9fae 4564 rtx_insn *next = NEXT_INSN (insn);
6fb5fa3c 4565 add_insn_before (insn, before, bb);
2f937369
DM
4566 last = insn;
4567 insn = next;
4568 }
4569 break;
4570
4571#ifdef ENABLE_RTL_CHECKING
4572 case SEQUENCE:
5b0264cb 4573 gcc_unreachable ();
2f937369
DM
4574 break;
4575#endif
4576
4577 default:
5f02387d 4578 last = (*make_raw) (x);
6fb5fa3c 4579 add_insn_before (last, before, bb);
2f937369 4580 break;
23b2ce53
RS
4581 }
4582
4dea3bff 4583 return last;
23b2ce53
RS
4584}
4585
5f02387d
NF
4586/* Make X be output before the instruction BEFORE. */
4587
cd459bf8 4588rtx_insn *
596f2b17 4589emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
5f02387d
NF
4590{
4591 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4592}
4593
2f937369 4594/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4595 and output it before the instruction BEFORE. */
4596
1476d1bd 4597rtx_jump_insn *
596f2b17 4598emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
23b2ce53 4599{
1476d1bd 4600 return as_a <rtx_jump_insn *> (
4dea3bff 4601 emit_pattern_before_noloc (x, before, NULL, NULL,
1476d1bd 4602 make_jump_insn_raw));
23b2ce53
RS
4603}
4604
2f937369 4605/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4606 and output it before the instruction BEFORE. */
4607
cd459bf8 4608rtx_insn *
596f2b17 4609emit_call_insn_before_noloc (rtx x, rtx_insn *before)
969d70ca 4610{
4dea3bff 4611 return emit_pattern_before_noloc (x, before, NULL, NULL,
5f02387d 4612 make_call_insn_raw);
969d70ca
JH
4613}
4614
b5b8b0ac
AO
4615/* Make an instruction with body X and code DEBUG_INSN
4616 and output it before the instruction BEFORE. */
4617
cd459bf8 4618rtx_insn *
4dea3bff 4619emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
b5b8b0ac 4620{
4dea3bff 4621 return emit_pattern_before_noloc (x, before, NULL, NULL,
5f02387d 4622 make_debug_insn_raw);
b5b8b0ac
AO
4623}
4624
23b2ce53 4625/* Make an insn of code BARRIER
e881bb1b 4626 and output it before the insn BEFORE. */
23b2ce53 4627
cd459bf8 4628rtx_barrier *
4dea3bff 4629emit_barrier_before (rtx_insn *before)
23b2ce53 4630{
cd459bf8 4631 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4632
4633 INSN_UID (insn) = cur_insn_uid++;
4634
6fb5fa3c 4635 add_insn_before (insn, before, NULL);
23b2ce53
RS
4636 return insn;
4637}
4638
e881bb1b
RH
4639/* Emit the label LABEL before the insn BEFORE. */
4640
1476d1bd 4641rtx_code_label *
4dea3bff 4642emit_label_before (rtx_code_label *label, rtx_insn *before)
e881bb1b 4643{
468660d3
SB
4644 gcc_checking_assert (INSN_UID (label) == 0);
4645 INSN_UID (label) = cur_insn_uid++;
4646 add_insn_before (label, before, NULL);
4dea3bff 4647 return label;
e881bb1b 4648}
23b2ce53 4649\f
2f937369
DM
4650/* Helper for emit_insn_after, handles lists of instructions
4651 efficiently. */
23b2ce53 4652
e6eda746 4653static rtx_insn *
4dea3bff 4654emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
23b2ce53 4655{
1130d5e3
DM
4656 rtx_insn *last;
4657 rtx_insn *after_after;
6fb5fa3c
DB
4658 if (!bb && !BARRIER_P (after))
4659 bb = BLOCK_FOR_INSN (after);
23b2ce53 4660
6fb5fa3c 4661 if (bb)
23b2ce53 4662 {
6fb5fa3c 4663 df_set_bb_dirty (bb);
2f937369 4664 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4665 if (!BARRIER_P (last))
6fb5fa3c
DB
4666 {
4667 set_block_for_insn (last, bb);
4668 df_insn_rescan (last);
4669 }
4b4bf941 4670 if (!BARRIER_P (last))
6fb5fa3c
DB
4671 {
4672 set_block_for_insn (last, bb);
4673 df_insn_rescan (last);
4674 }
a813c111 4675 if (BB_END (bb) == after)
1130d5e3 4676 BB_END (bb) = last;
23b2ce53
RS
4677 }
4678 else
2f937369
DM
4679 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4680 continue;
4681
4682 after_after = NEXT_INSN (after);
4683
0f82e5c9
DM
4684 SET_NEXT_INSN (after) = first;
4685 SET_PREV_INSN (first) = after;
4686 SET_NEXT_INSN (last) = after_after;
2f937369 4687 if (after_after)
0f82e5c9 4688 SET_PREV_INSN (after_after) = last;
2f937369 4689
c3284718 4690 if (after == get_last_insn ())
5936d944 4691 set_last_insn (last);
e855c69d 4692
2f937369
DM
4693 return last;
4694}
4695
cd459bf8 4696static rtx_insn *
4dea3bff 4697emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
167b9fae 4698 rtx_insn *(*make_raw)(rtx))
2f937369 4699{
e6eda746 4700 rtx_insn *last = after;
2f937369 4701
5b0264cb 4702 gcc_assert (after);
2f937369
DM
4703
4704 if (x == NULL_RTX)
e6eda746 4705 return last;
2f937369
DM
4706
4707 switch (GET_CODE (x))
23b2ce53 4708 {
b5b8b0ac 4709 case DEBUG_INSN:
2f937369
DM
4710 case INSN:
4711 case JUMP_INSN:
4712 case CALL_INSN:
4713 case CODE_LABEL:
4714 case BARRIER:
4715 case NOTE:
1130d5e3 4716 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
2f937369
DM
4717 break;
4718
4719#ifdef ENABLE_RTL_CHECKING
4720 case SEQUENCE:
5b0264cb 4721 gcc_unreachable ();
2f937369
DM
4722 break;
4723#endif
4724
4725 default:
5f02387d 4726 last = (*make_raw) (x);
6fb5fa3c 4727 add_insn_after (last, after, bb);
2f937369 4728 break;
23b2ce53
RS
4729 }
4730
e6eda746 4731 return last;
23b2ce53
RS
4732}
4733
5f02387d
NF
4734/* Make X be output after the insn AFTER and set the BB of insn. If
4735 BB is NULL, an attempt is made to infer the BB from AFTER. */
4736
cd459bf8 4737rtx_insn *
4dea3bff 4738emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
5f02387d
NF
4739{
4740 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4741}
4742
255680cf 4743
2f937369 4744/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4745 and output it after the insn AFTER. */
4746
1476d1bd 4747rtx_jump_insn *
4dea3bff 4748emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
23b2ce53 4749{
1476d1bd
MM
4750 return as_a <rtx_jump_insn *> (
4751 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
2f937369
DM
4752}
4753
4754/* Make an instruction with body X and code CALL_INSN
4755 and output it after the instruction AFTER. */
4756
cd459bf8 4757rtx_insn *
4dea3bff 4758emit_call_insn_after_noloc (rtx x, rtx_insn *after)
2f937369 4759{
5f02387d 4760 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4761}
4762
b5b8b0ac
AO
4763/* Make an instruction with body X and code CALL_INSN
4764 and output it after the instruction AFTER. */
4765
cd459bf8 4766rtx_insn *
4dea3bff 4767emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
b5b8b0ac 4768{
5f02387d 4769 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4770}
4771
23b2ce53
RS
4772/* Make an insn of code BARRIER
4773 and output it after the insn AFTER. */
4774
cd459bf8 4775rtx_barrier *
4dea3bff 4776emit_barrier_after (rtx_insn *after)
23b2ce53 4777{
cd459bf8 4778 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4779
4780 INSN_UID (insn) = cur_insn_uid++;
4781
6fb5fa3c 4782 add_insn_after (insn, after, NULL);
23b2ce53
RS
4783 return insn;
4784}
4785
4786/* Emit the label LABEL after the insn AFTER. */
4787
cd459bf8 4788rtx_insn *
4dea3bff 4789emit_label_after (rtx_insn *label, rtx_insn *after)
23b2ce53 4790{
468660d3
SB
4791 gcc_checking_assert (INSN_UID (label) == 0);
4792 INSN_UID (label) = cur_insn_uid++;
4793 add_insn_after (label, after, NULL);
4dea3bff 4794 return label;
23b2ce53 4795}
96fba521
SB
4796\f
4797/* Notes require a bit of special handling: Some notes need to have their
4798 BLOCK_FOR_INSN set, others should never have it set, and some should
4799 have it set or clear depending on the context. */
4800
4801/* Return true iff a note of kind SUBTYPE should be emitted with routines
4802 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4803 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4804
4805static bool
4806note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4807{
4808 switch (subtype)
4809 {
4810 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4811 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4812 return true;
4813
4814 /* Notes for var tracking and EH region markers can appear between or
4815 inside basic blocks. If the caller is emitting on the basic block
4816 boundary, do not set BLOCK_FOR_INSN on the new note. */
4817 case NOTE_INSN_VAR_LOCATION:
96fba521
SB
4818 case NOTE_INSN_EH_REGION_BEG:
4819 case NOTE_INSN_EH_REGION_END:
4820 return on_bb_boundary_p;
4821
4822 /* Otherwise, BLOCK_FOR_INSN must be set. */
4823 default:
4824 return false;
4825 }
4826}
23b2ce53
RS
4827
4828/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4829
66e8df53 4830rtx_note *
589e43f9 4831emit_note_after (enum insn_note subtype, rtx_insn *after)
23b2ce53 4832{
66e8df53 4833 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4834 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4835 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4836
4837 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4838 add_insn_after_nobb (note, after);
4839 else
4840 add_insn_after (note, after, bb);
4841 return note;
4842}
4843
4844/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4845
66e8df53 4846rtx_note *
89b6250d 4847emit_note_before (enum insn_note subtype, rtx_insn *before)
96fba521 4848{
66e8df53 4849 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4850 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4851 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4852
4853 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4854 add_insn_before_nobb (note, before);
4855 else
4856 add_insn_before (note, before, bb);
23b2ce53
RS
4857 return note;
4858}
23b2ce53 4859\f
e8110d6f
NF
4860/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4861 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4862
cd459bf8 4863static rtx_insn *
4dea3bff 4864emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
167b9fae 4865 rtx_insn *(*make_raw) (rtx))
0d682900 4866{
e67d1102 4867 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4868
a7102479 4869 if (pattern == NULL_RTX || !loc)
e67d1102 4870 return last;
dd3adcf8 4871
2f937369
DM
4872 after = NEXT_INSN (after);
4873 while (1)
4874 {
20d4397a
EB
4875 if (active_insn_p (after)
4876 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4877 && !INSN_LOCATION (after))
5368224f 4878 INSN_LOCATION (after) = loc;
2f937369
DM
4879 if (after == last)
4880 break;
4881 after = NEXT_INSN (after);
4882 }
e67d1102 4883 return last;
0d682900
JH
4884}
4885
e8110d6f
NF
4886/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4887 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4888 any DEBUG_INSNs. */
4889
cd459bf8 4890static rtx_insn *
df0b55f0 4891emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
167b9fae 4892 rtx_insn *(*make_raw) (rtx))
a7102479 4893{
dc01c3d1 4894 rtx_insn *prev = after;
b5b8b0ac 4895
e8110d6f
NF
4896 if (skip_debug_insns)
4897 while (DEBUG_INSN_P (prev))
4898 prev = PREV_INSN (prev);
b5b8b0ac
AO
4899
4900 if (INSN_P (prev))
5368224f 4901 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
e8110d6f 4902 make_raw);
a7102479 4903 else
e8110d6f 4904 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4905}
4906
5368224f 4907/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4908rtx_insn *
4dea3bff 4909emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
0d682900 4910{
e8110d6f
NF
4911 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4912}
2f937369 4913
5368224f 4914/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4915rtx_insn *
df0b55f0 4916emit_insn_after (rtx pattern, rtx_insn *after)
e8110d6f
NF
4917{
4918 return emit_pattern_after (pattern, after, true, make_insn_raw);
4919}
dd3adcf8 4920
5368224f 4921/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4922rtx_jump_insn *
4dea3bff 4923emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
e8110d6f 4924{
1476d1bd
MM
4925 return as_a <rtx_jump_insn *> (
4926 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
0d682900
JH
4927}
4928
5368224f 4929/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
1476d1bd 4930rtx_jump_insn *
df0b55f0 4931emit_jump_insn_after (rtx pattern, rtx_insn *after)
a7102479 4932{
1476d1bd
MM
4933 return as_a <rtx_jump_insn *> (
4934 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
a7102479
JH
4935}
4936
5368224f 4937/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4938rtx_insn *
4dea3bff 4939emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
0d682900 4940{
e8110d6f 4941 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4942}
4943
5368224f 4944/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4945rtx_insn *
df0b55f0 4946emit_call_insn_after (rtx pattern, rtx_insn *after)
a7102479 4947{
e8110d6f 4948 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4949}
4950
5368224f 4951/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4952rtx_insn *
4dea3bff 4953emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
b5b8b0ac 4954{
e8110d6f 4955 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4956}
4957
5368224f 4958/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4959rtx_insn *
df0b55f0 4960emit_debug_insn_after (rtx pattern, rtx_insn *after)
b5b8b0ac 4961{
e8110d6f 4962 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4963}
4964
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
			    bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL,
				      NULL, make_raw);
}

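/* Editor's note: SKIP_DEBUG_INSNS exists so that inserting before a
   DEBUG_INSN borrows the location of the preceding real insn instead.
   Keeping the chosen location independent of interleaved debug insns
   helps ensure that compiling with and without -g emits the same code
   with the same locations.  */
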
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

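/* Editor's note -- an illustrative sketch, not part of the original
   source: emit_insn accepts either a bare pattern or an already-built
   insn chain, so both of the following are valid (DEST and SRC are
   hypothetical rtxes):

     emit_insn (gen_rtx_SET (dest, src));  // wrap pattern in a new INSN

     start_sequence ();
     // ... emit insns ...
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn (seq);                      // splice the detached chain
*/
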
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

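/* Editor's note (illustration only): the emitters above create chain
   bookkeeping objects rather than executable insns, e.g.

     emit_note (NOTE_INSN_EPILOGUE_BEG);  // mark a point in the stream
     emit_barrier ();                     // after an unconditional jump

   Notes and barriers have no pattern to recognize; they only give the
   doubly-linked insn list its structure.  */
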
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

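/* Editor's note (illustration only): gen_clobber and gen_use wrap
   emit_clobber/emit_use in a temporary sequence so the caller gets a
   detached insn chain.  A pass that must keep REG live up to INSN
   (both hypothetical) could splice in

     emit_insn_before (gen_use (reg), insn);
*/
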
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
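
/* Editor's note (illustration only): a typical use of
   set_unique_reg_note is to record the value computed by a multi-insn
   expansion on its final insn,

     set_unique_reg_note (last, REG_EQUAL, copy_rtx (value));

   where LAST sets the destination register and VALUE is the expression
   it is known to equal (both names hypothetical).  The guards above
   reject ASM_OPERANDS and anything with side effects.  */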
\f
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
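
/* Editor's note (illustration only): the canonical idiom for the
   functions above is

     start_sequence ();
     // ... emit one or more insns ...
     rtx_insn *seq = get_insns ();  // must precede end_sequence
     end_sequence ();

   after which SEQ is a detached chain that can be spliced into the
   stream with emit_insn, emit_insn_before, and friends.  */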
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
    case CLOBBER_HIGH:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

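/* Editor's note (illustration only): copy_insn is normally handed a
   pattern rather than the insn itself, as in

     rtx_insn *dup = emit_insn_after (copy_insn (PATTERN (insn)), insn);

   Resetting the statics on each call is what makes repeated references
   to one SCRATCH map to a single copy within a pattern while staying
   independent across separate copies.  */
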
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

RS
5916/* Return the value of element I of CONST_VECTOR X as a wide_int. */
5917
5918wide_int
5919const_vector_int_elt (const_rtx x, unsigned int i)
5920{
5921 /* First handle elements that are directly encoded. */
5922 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5923 if (i < (unsigned int) XVECLEN (x, 0))
5924 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5925
5926 /* Identify the pattern that contains element I and work out the index of
5927 the last encoded element for that pattern. */
5928 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5929 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5930 unsigned int count = i / npatterns;
5931 unsigned int pattern = i % npatterns;
5932 unsigned int final_i = encoded_nelts - npatterns + pattern;
5933
5934 /* If there are no steps, the final encoded value is the right one. */
5935 if (!CONST_VECTOR_STEPPED_P (x))
5936 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5937
5938 /* Otherwise work out the value from the last two encoded elements. */
5939 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5940 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5941 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5942 rtx_mode_t (v1, elt_mode));
5943 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5944}
5945
5946/* Return the value of element I of CONST_VECTOR X. */
5947
5948rtx
5949const_vector_elt (const_rtx x, unsigned int i)
5950{
5951 /* First handle elements that are directly encoded. */
5952 if (i < (unsigned int) XVECLEN (x, 0))
5953 return CONST_VECTOR_ENCODED_ELT (x, i);
5954
5955 /* If there are no steps, the final encoded value is the right one. */
5956 if (!CONST_VECTOR_STEPPED_P (x))
5957 {
5958 /* Identify the pattern that contains element I and work out the index of
5959 the last encoded element for that pattern. */
5960 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5961 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5962 unsigned int pattern = i % npatterns;
5963 unsigned int final_i = encoded_nelts - npatterns + pattern;
5964 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5965 }
5966
5967 /* Otherwise work out the value from the last two encoded elements. */
5968 return immed_wide_int_const (const_vector_int_elt (x, i),
5969 GET_MODE_INNER (GET_MODE (x)));
5970}
5971
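/* Editor's note -- a worked example, not in the original source: for a
   stepped V8SI constant {0, 9, 1, 10, 2, 11, 3, 12} the encoding has
   npatterns == 2 with three encoded elements per pattern:
   {0, 9, 1, 10, 2, 11}.  Requesting element i == 7 gives
   pattern == 7 % 2 == 1, count == 7 / 2 == 3 and
   final_i == 6 - 2 + 1 == 5, so v1 == 10, v2 == 11, diff == 1, and the
   result is 11 + (3 - 2) * 1 == 12, matching the full vector.  */
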
/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}

/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}

/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}

/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
	      && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
					     builder[i - 1], step));
  return builder.build ();
}

/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}

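/* Editor's note (illustration only): with the functions above,
   gen_const_vec_series (V4SImode, const0_rtx, const1_rtx) produces the
   constant {0, 1, 2, 3}, stored as a single stepped pattern of three
   encoded elements {0, 1, 2}; gen_vec_series instead falls back to a
   VEC_SERIES rtx when BASE or STEP is not constant.  */
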
/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}

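/* Editor's note: on a typical LP64 target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 64, the loop above leaves byte_mode == QImode and
   word_mode == DImode, while ptr_mode is the integer mode whose width
   matches POINTER_SIZE (DImode for 64-bit pointers, SImode under an
   ILP32 ABI on the same hardware).  */
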
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER expression for register REGNO in MODE, cached in
   HARD_REG_CLOBBERS.  */
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

static GTY((deletable)) rtx
hard_reg_clobbers_high[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER_HIGH expression for register REGNO that clobbers MODE,
   caching into HARD_REG_CLOBBERS_HIGH.  */
rtx
gen_hard_reg_clobber_high (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers_high[mode][regno])
    return hard_reg_clobbers_high[mode][regno];
  else
    return (hard_reg_clobbers_high[mode][regno]
	    = gen_rtx_CLOBBER_HIGH (VOIDmode, gen_rtx_REG (mode, regno)));
}

location_t prologue_location;
location_t epilogue_location;

/* The current insn location.  Location data structures are built
   lazily, so that locations are only recorded when insns at a given
   place are actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structures.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

MK
6613/* Return true if memory model MODEL requires a pre-operation (release-style)
6614 barrier or a post-operation (acquire-style) barrier. While not universal,
6615 this function matches behavior of several targets. */
6616
6617bool
6618need_atomic_barrier_p (enum memmodel model, bool pre)
6619{
40ad260d 6620 switch (model & MEMMODEL_BASE_MASK)
8930883e
MK
6621 {
6622 case MEMMODEL_RELAXED:
6623 case MEMMODEL_CONSUME:
6624 return false;
6625 case MEMMODEL_RELEASE:
6626 return pre;
6627 case MEMMODEL_ACQUIRE:
6628 return !pre;
6629 case MEMMODEL_ACQ_REL:
6630 case MEMMODEL_SEQ_CST:
6631 return true;
6632 default:
6633 gcc_unreachable ();
6634 }
6635}
8194c537 6636
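/* Editor's note -- an illustrative sketch, not from the original
   source: a target's atomic-load expander might use the predicate as

     if (need_atomic_barrier_p (model, true))     // pre-op barrier?
       emit_insn (gen_target_memory_barrier ());  // hypothetical,
                                                  // target-specific

   where the barrier generator is named here only for illustration.  */
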
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}

/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}

\f
#include "gt-emit-rtl.h"