]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
Add gen_(const_)vec_duplicate helpers
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
cbe34bb5 2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
23b2ce53 3
1322177d 4This file is part of GCC.
23b2ce53 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
23b2ce53 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
23b2ce53
RS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
23b2ce53
RS
19
20
21/* Middle-to-low level generation of rtx code and insns.
22
f822fcf7
KH
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
23b2ce53
RS
33
34#include "config.h"
670ee920 35#include "system.h"
4977bab6 36#include "coretypes.h"
4d0cdd0c 37#include "memmodel.h"
c7131fb2 38#include "backend.h"
957060b5 39#include "target.h"
23b2ce53 40#include "rtl.h"
957060b5 41#include "tree.h"
c7131fb2 42#include "df.h"
957060b5
AM
43#include "tm_p.h"
44#include "stringpool.h"
957060b5
AM
45#include "insn-config.h"
46#include "regs.h"
47#include "emit-rtl.h"
48#include "recog.h"
c7131fb2 49#include "diagnostic-core.h"
40e23961 50#include "alias.h"
40e23961 51#include "fold-const.h"
d8a2d370 52#include "varasm.h"
60393bbc 53#include "cfgrtl.h"
60393bbc 54#include "tree-eh.h"
36566b39 55#include "explow.h"
23b2ce53 56#include "expr.h"
b5b8b0ac 57#include "params.h"
9b2b7279 58#include "builtins.h"
9021b8ec 59#include "rtl-iter.h"
1f9ceff1 60#include "stor-layout.h"
ecf835e9 61#include "opts.h"
5fa396ad 62#include "predict.h"
ca695ac9 63
5fb0e246
RS
64struct target_rtl default_target_rtl;
65#if SWITCHABLE_TARGET
66struct target_rtl *this_target_rtl = &default_target_rtl;
67#endif
68
69#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
1d445e9e
ILT
71/* Commonly used modes. */
72
501623d4
RS
73scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 76
bd60bab2
JH
77/* Datastructures maintained for currently processed function in RTL form. */
78
3e029763 79struct rtl_data x_rtl;
bd60bab2
JH
80
81/* Indexed by pseudo register number, gives the rtx for that pseudo.
b8698a0f 82 Allocated in parallel with regno_pointer_align.
bd60bab2
JH
83 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
84 with length attribute nested in top level structures. */
85
86rtx * regno_reg_rtx;
23b2ce53
RS
87
88/* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
90
044b4de3 91static GTY(()) int label_num = 1;
23b2ce53 92
23b2ce53
RS
93/* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
e7c82a99
JJ
95 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
96 is set only for MODE_INT and MODE_VECTOR_INT modes. */
23b2ce53 97
e7c82a99 98rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
23b2ce53 99
68d75312
JC
100rtx const_true_rtx;
101
23b2ce53
RS
102REAL_VALUE_TYPE dconst0;
103REAL_VALUE_TYPE dconst1;
104REAL_VALUE_TYPE dconst2;
105REAL_VALUE_TYPE dconstm1;
03f2ea93 106REAL_VALUE_TYPE dconsthalf;
23b2ce53 107
325217ed
CF
108/* Record fixed-point constant 0 and 1. */
109FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111
23b2ce53
RS
112/* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
116
5da077de 117rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 118
ca4adc91
RS
119/* Standard pieces of rtx, to be substituted directly into things. */
120rtx pc_rtx;
121rtx ret_rtx;
122rtx simple_return_rtx;
123rtx cc0_rtx;
124
1476d1bd
MM
125/* Marker used for denoting an INSN, which should never be accessed (i.e.,
126 this pointer should normally never be dereferenced), but is required to be
127 distinct from NULL_RTX. Currently used by peephole2 pass. */
128rtx_insn *invalid_insn_rtx;
129
c13e8210
MM
130/* A hash table storing CONST_INTs whose absolute value is greater
131 than MAX_SAVED_CONST_INT. */
132
6c907cff 133struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
aebf76a2
TS
134{
135 typedef HOST_WIDE_INT compare_type;
136
137 static hashval_t hash (rtx i);
138 static bool equal (rtx i, HOST_WIDE_INT h);
139};
c13e8210 140
aebf76a2
TS
141static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
142
6c907cff 143struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
aebf76a2
TS
144{
145 static hashval_t hash (rtx x);
146 static bool equal (rtx x, rtx y);
147};
148
149static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
807e902e 150
a560d4d4 151/* A hash table storing register attribute structures. */
6c907cff 152struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
aebf76a2
TS
153{
154 static hashval_t hash (reg_attrs *x);
155 static bool equal (reg_attrs *a, reg_attrs *b);
156};
157
158static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
a560d4d4 159
5692c7bc 160/* A hash table storing all CONST_DOUBLEs. */
6c907cff 161struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
aebf76a2
TS
162{
163 static hashval_t hash (rtx x);
164 static bool equal (rtx x, rtx y);
165};
166
167static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
5692c7bc 168
091a3ac7 169/* A hash table storing all CONST_FIXEDs. */
6c907cff 170struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
aebf76a2
TS
171{
172 static hashval_t hash (rtx x);
173 static bool equal (rtx x, rtx y);
174};
175
176static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
091a3ac7 177
3e029763 178#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
b5b8b0ac 179#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
3e029763 180#define first_label_num (crtl->emit.x_first_label_num)
23b2ce53 181
5eb2a9f2 182static void set_used_decls (tree);
502b8322 183static void mark_label_nuses (rtx);
807e902e 184#if TARGET_SUPPORTS_WIDE_INT
807e902e
KZ
185static rtx lookup_const_wide_int (rtx);
186#endif
502b8322 187static rtx lookup_const_double (rtx);
091a3ac7 188static rtx lookup_const_fixed (rtx);
502b8322 189static reg_attrs *get_reg_attrs (tree, int);
ef4bddc2 190static rtx gen_const_vector (machine_mode, int);
32b32b16 191static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 192
5fa396ad
JH
193/* Probability of the conditional branch currently proceeded by try_split. */
194profile_probability split_branch_probability;
ca695ac9 195\f
c13e8210
MM
196/* Returns a hash code for X (which is a really a CONST_INT). */
197
aebf76a2
TS
198hashval_t
199const_int_hasher::hash (rtx x)
c13e8210 200{
aebf76a2 201 return (hashval_t) INTVAL (x);
c13e8210
MM
202}
203
cc2902df 204/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
205 CONST_INT) is the same as that given by Y (which is really a
206 HOST_WIDE_INT *). */
207
aebf76a2
TS
208bool
209const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
c13e8210 210{
aebf76a2 211 return (INTVAL (x) == y);
5692c7bc
ZW
212}
213
807e902e
KZ
214#if TARGET_SUPPORTS_WIDE_INT
215/* Returns a hash code for X (which is a really a CONST_WIDE_INT). */
216
aebf76a2
TS
217hashval_t
218const_wide_int_hasher::hash (rtx x)
807e902e
KZ
219{
220 int i;
d7ca26e4 221 unsigned HOST_WIDE_INT hash = 0;
aebf76a2 222 const_rtx xr = x;
807e902e
KZ
223
224 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
225 hash += CONST_WIDE_INT_ELT (xr, i);
226
227 return (hashval_t) hash;
228}
229
230/* Returns nonzero if the value represented by X (which is really a
231 CONST_WIDE_INT) is the same as that given by Y (which is really a
232 CONST_WIDE_INT). */
233
aebf76a2
TS
234bool
235const_wide_int_hasher::equal (rtx x, rtx y)
807e902e
KZ
236{
237 int i;
aebf76a2
TS
238 const_rtx xr = x;
239 const_rtx yr = y;
807e902e 240 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
aebf76a2 241 return false;
807e902e
KZ
242
243 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
244 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
aebf76a2 245 return false;
807e902e 246
aebf76a2 247 return true;
807e902e
KZ
248}
249#endif
250
5692c7bc 251/* Returns a hash code for X (which is really a CONST_DOUBLE). */
aebf76a2
TS
252hashval_t
253const_double_hasher::hash (rtx x)
5692c7bc 254{
aebf76a2 255 const_rtx const value = x;
46b33600 256 hashval_t h;
5692c7bc 257
807e902e 258 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
46b33600
RH
259 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
260 else
fe352c29 261 {
15c812e3 262 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
263 /* MODE is used in the comparison, so it should be in the hash. */
264 h ^= GET_MODE (value);
265 }
5692c7bc
ZW
266 return h;
267}
268
cc2902df 269/* Returns nonzero if the value represented by X (really a ...)
5692c7bc 270 is the same as that represented by Y (really a ...) */
aebf76a2
TS
271bool
272const_double_hasher::equal (rtx x, rtx y)
5692c7bc 273{
aebf76a2 274 const_rtx const a = x, b = y;
5692c7bc
ZW
275
276 if (GET_MODE (a) != GET_MODE (b))
277 return 0;
807e902e 278 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
8580f7a0
RH
279 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
280 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
281 else
282 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
283 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
284}
285
091a3ac7
CF
286/* Returns a hash code for X (which is really a CONST_FIXED). */
287
aebf76a2
TS
288hashval_t
289const_fixed_hasher::hash (rtx x)
091a3ac7 290{
aebf76a2 291 const_rtx const value = x;
091a3ac7
CF
292 hashval_t h;
293
294 h = fixed_hash (CONST_FIXED_VALUE (value));
295 /* MODE is used in the comparison, so it should be in the hash. */
296 h ^= GET_MODE (value);
297 return h;
298}
299
aebf76a2
TS
300/* Returns nonzero if the value represented by X is the same as that
301 represented by Y. */
091a3ac7 302
aebf76a2
TS
303bool
304const_fixed_hasher::equal (rtx x, rtx y)
091a3ac7 305{
aebf76a2 306 const_rtx const a = x, b = y;
091a3ac7
CF
307
308 if (GET_MODE (a) != GET_MODE (b))
309 return 0;
310 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
311}
312
f12144dd 313/* Return true if the given memory attributes are equal. */
c13e8210 314
96b3c03f 315bool
f12144dd 316mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
c13e8210 317{
96b3c03f
RB
318 if (p == q)
319 return true;
320 if (!p || !q)
321 return false;
754c3d5d
RS
322 return (p->alias == q->alias
323 && p->offset_known_p == q->offset_known_p
324 && (!p->offset_known_p || p->offset == q->offset)
325 && p->size_known_p == q->size_known_p
326 && (!p->size_known_p || p->size == q->size)
327 && p->align == q->align
09e881c9 328 && p->addrspace == q->addrspace
78b76d08
SB
329 && (p->expr == q->expr
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE
331 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
332}
333
f12144dd 334/* Set MEM's memory attributes so that they are the same as ATTRS. */
10b76d73 335
f12144dd
RS
336static void
337set_mem_attrs (rtx mem, mem_attrs *attrs)
338{
f12144dd
RS
339 /* If everything is the default, we can just clear the attributes. */
340 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
341 {
342 MEM_ATTRS (mem) = 0;
343 return;
344 }
173b24b9 345
84053e02
RB
346 if (!MEM_ATTRS (mem)
347 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
173b24b9 348 {
766090c2 349 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
84053e02 350 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
173b24b9 351 }
c13e8210
MM
352}
353
a560d4d4
JH
354/* Returns a hash code for X (which is a really a reg_attrs *). */
355
aebf76a2
TS
356hashval_t
357reg_attr_hasher::hash (reg_attrs *x)
a560d4d4 358{
aebf76a2 359 const reg_attrs *const p = x;
a560d4d4 360
9841210f 361 return ((p->offset * 1000) ^ (intptr_t) p->decl);
a560d4d4
JH
362}
363
aebf76a2
TS
364/* Returns nonzero if the value represented by X is the same as that given by
365 Y. */
a560d4d4 366
aebf76a2
TS
367bool
368reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
a560d4d4 369{
aebf76a2
TS
370 const reg_attrs *const p = x;
371 const reg_attrs *const q = y;
a560d4d4
JH
372
373 return (p->decl == q->decl && p->offset == q->offset);
374}
375/* Allocate a new reg_attrs structure and insert it into the hash table if
376 one identical to it is not already in the table. We are doing this for
377 MEM of mode MODE. */
378
379static reg_attrs *
502b8322 380get_reg_attrs (tree decl, int offset)
a560d4d4
JH
381{
382 reg_attrs attrs;
a560d4d4
JH
383
384 /* If everything is the default, we can just return zero. */
385 if (decl == 0 && offset == 0)
386 return 0;
387
388 attrs.decl = decl;
389 attrs.offset = offset;
390
aebf76a2 391 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
a560d4d4
JH
392 if (*slot == 0)
393 {
766090c2 394 *slot = ggc_alloc<reg_attrs> ();
a560d4d4
JH
395 memcpy (*slot, &attrs, sizeof (reg_attrs));
396 }
397
aebf76a2 398 return *slot;
a560d4d4
JH
399}
400
6fb5fa3c
DB
401
402#if !HAVE_blockage
adddc347
HPN
403/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
404 and to block register equivalences to be seen across this insn. */
6fb5fa3c
DB
405
406rtx
407gen_blockage (void)
408{
409 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
410 MEM_VOLATILE_P (x) = true;
411 return x;
412}
413#endif
414
415
8deccbb7
RS
416/* Set the mode and register number of X to MODE and REGNO. */
417
418void
419set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
420{
9188b286 421 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
ad474626 422 ? hard_regno_nregs (regno, mode)
9188b286 423 : 1);
8deccbb7 424 PUT_MODE_RAW (x, mode);
9188b286 425 set_regno_raw (x, regno, nregs);
8deccbb7
RS
426}
427
08394eef
BS
428/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
429 don't attempt to share with the various global pieces of rtl (such as
430 frame_pointer_rtx). */
431
432rtx
8deccbb7 433gen_raw_REG (machine_mode mode, unsigned int regno)
08394eef 434{
84c2ad23 435 rtx x = rtx_alloc (REG MEM_STAT_INFO);
8deccbb7 436 set_mode_and_regno (x, mode, regno);
9fccb335 437 REG_ATTRS (x) = NULL;
08394eef
BS
438 ORIGINAL_REGNO (x) = regno;
439 return x;
440}
441
c5c76735
JL
442/* There are some RTL codes that require special attention; the generation
443 functions do the raw handling. If you add to this list, modify
444 special_rtx in gengenrtl.c as well. */
445
38e60c55 446rtx_expr_list *
ef4bddc2 447gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
38e60c55
DM
448{
449 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
450 expr_list));
451}
452
a756c6be 453rtx_insn_list *
ef4bddc2 454gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
a756c6be
DM
455{
456 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
457 insn_list));
458}
459
d6e1e8b8 460rtx_insn *
ef4bddc2 461gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
d6e1e8b8
DM
462 basic_block bb, rtx pattern, int location, int code,
463 rtx reg_notes)
464{
465 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
466 prev_insn, next_insn,
467 bb, pattern, location, code,
468 reg_notes));
469}
470
3b80f6ca 471rtx
ef4bddc2 472gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca
RH
473{
474 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 475 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
476
477#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
478 if (const_true_rtx && arg == STORE_FLAG_VALUE)
479 return const_true_rtx;
480#endif
481
c13e8210 482 /* Look up the CONST_INT in the hash table. */
aebf76a2
TS
483 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
484 INSERT);
29105cea 485 if (*slot == 0)
1f8f4a0b 486 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210 487
aebf76a2 488 return *slot;
3b80f6ca
RH
489}
490
2496c7bd 491rtx
ef4bddc2 492gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
2496c7bd
LB
493{
494 return GEN_INT (trunc_int_for_mode (c, mode));
495}
496
5692c7bc
ZW
497/* CONST_DOUBLEs might be created from pairs of integers, or from
498 REAL_VALUE_TYPEs. Also, their length is known only at run time,
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
500
501/* Determine whether REAL, a CONST_DOUBLE, already exists in the
502 hash table. If so, return its counterpart; otherwise add it
503 to the hash table and return it. */
504static rtx
502b8322 505lookup_const_double (rtx real)
5692c7bc 506{
aebf76a2 507 rtx *slot = const_double_htab->find_slot (real, INSERT);
5692c7bc
ZW
508 if (*slot == 0)
509 *slot = real;
510
aebf76a2 511 return *slot;
5692c7bc 512}
29105cea 513
5692c7bc
ZW
514/* Return a CONST_DOUBLE rtx for a floating-point value specified by
515 VALUE in mode MODE. */
0133b7d9 516rtx
ef4bddc2 517const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
0133b7d9 518{
5692c7bc
ZW
519 rtx real = rtx_alloc (CONST_DOUBLE);
520 PUT_MODE (real, mode);
521
9e254451 522 real->u.rv = value;
5692c7bc
ZW
523
524 return lookup_const_double (real);
525}
526
091a3ac7
CF
527/* Determine whether FIXED, a CONST_FIXED, already exists in the
528 hash table. If so, return its counterpart; otherwise add it
529 to the hash table and return it. */
530
531static rtx
532lookup_const_fixed (rtx fixed)
533{
aebf76a2 534 rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
091a3ac7
CF
535 if (*slot == 0)
536 *slot = fixed;
537
aebf76a2 538 return *slot;
091a3ac7
CF
539}
540
541/* Return a CONST_FIXED rtx for a fixed-point value specified by
542 VALUE in mode MODE. */
543
544rtx
ef4bddc2 545const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
091a3ac7
CF
546{
547 rtx fixed = rtx_alloc (CONST_FIXED);
548 PUT_MODE (fixed, mode);
549
550 fixed->u.fv = value;
551
552 return lookup_const_fixed (fixed);
553}
554
807e902e 555#if TARGET_SUPPORTS_WIDE_INT == 0
3e93ff81
AS
556/* Constructs double_int from rtx CST. */
557
558double_int
559rtx_to_double_int (const_rtx cst)
560{
561 double_int r;
562
563 if (CONST_INT_P (cst))
27bcd47c 564 r = double_int::from_shwi (INTVAL (cst));
48175537 565 else if (CONST_DOUBLE_AS_INT_P (cst))
3e93ff81
AS
566 {
567 r.low = CONST_DOUBLE_LOW (cst);
568 r.high = CONST_DOUBLE_HIGH (cst);
569 }
570 else
571 gcc_unreachable ();
572
573 return r;
574}
807e902e 575#endif
3e93ff81 576
807e902e
KZ
577#if TARGET_SUPPORTS_WIDE_INT
578/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
579 If so, return its counterpart; otherwise add it to the hash table and
580 return it. */
3e93ff81 581
807e902e
KZ
582static rtx
583lookup_const_wide_int (rtx wint)
584{
aebf76a2 585 rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
807e902e
KZ
586 if (*slot == 0)
587 *slot = wint;
588
aebf76a2 589 return *slot;
807e902e
KZ
590}
591#endif
592
593/* Return an rtx constant for V, given that the constant has mode MODE.
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
596 (if TARGET_SUPPORTS_WIDE_INT). */
54fb1ae0
AS
597
598rtx
ef4bddc2 599immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
54fb1ae0 600{
807e902e 601 unsigned int len = v.get_len ();
db61b7f9
RS
602 /* Not scalar_int_mode because we also allow pointer bound modes. */
603 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
807e902e
KZ
604
605 /* Allow truncation but not extension since we do not know if the
606 number is signed or unsigned. */
607 gcc_assert (prec <= v.get_precision ());
608
609 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
610 return gen_int_mode (v.elt (0), mode);
611
612#if TARGET_SUPPORTS_WIDE_INT
613 {
614 unsigned int i;
615 rtx value;
616 unsigned int blocks_needed
617 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
618
619 if (len > blocks_needed)
620 len = blocks_needed;
621
622 value = const_wide_int_alloc (len);
623
624 /* It is so tempting to just put the mode in here. Must control
625 myself ... */
626 PUT_MODE (value, VOIDmode);
627 CWI_PUT_NUM_ELEM (value, len);
628
629 for (i = 0; i < len; i++)
630 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
631
632 return lookup_const_wide_int (value);
633 }
634#else
635 return immed_double_const (v.elt (0), v.elt (1), mode);
636#endif
54fb1ae0
AS
637}
638
807e902e 639#if TARGET_SUPPORTS_WIDE_INT == 0
5692c7bc
ZW
640/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
641 of ints: I0 is the low-order word and I1 is the high-order word.
49ab6098 642 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
929e10f4
MS
643 implied upper bits are copies of the high bit of i1. The value
644 itself is neither signed nor unsigned. Do not use this routine for
645 non-integer modes; convert to REAL_VALUE_TYPE and use
555affd7 646 const_double_from_real_value. */
5692c7bc
ZW
647
648rtx
ef4bddc2 649immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
5692c7bc
ZW
650{
651 rtx value;
652 unsigned int i;
653
65acccdd 654 /* There are the following cases (note that there are no modes with
49ab6098 655 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
65acccdd
ZD
656
657 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
658 gen_int_mode.
929e10f4
MS
659 2) If the value of the integer fits into HOST_WIDE_INT anyway
660 (i.e., i1 consists only from copies of the sign bit, and sign
661 of i0 and i1 are the same), then we return a CONST_INT for i0.
65acccdd 662 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
db61b7f9
RS
663 scalar_mode smode;
664 if (is_a <scalar_mode> (mode, &smode)
665 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
666 return gen_int_mode (i0, mode);
5692c7bc
ZW
667
668 /* If this integer fits in one word, return a CONST_INT. */
669 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
670 return GEN_INT (i0);
671
672 /* We use VOIDmode for integers. */
673 value = rtx_alloc (CONST_DOUBLE);
674 PUT_MODE (value, VOIDmode);
675
676 CONST_DOUBLE_LOW (value) = i0;
677 CONST_DOUBLE_HIGH (value) = i1;
678
679 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
680 XWINT (value, i) = 0;
681
682 return lookup_const_double (value);
0133b7d9 683}
807e902e 684#endif
0133b7d9 685
3b80f6ca 686rtx
ef4bddc2 687gen_rtx_REG (machine_mode mode, unsigned int regno)
3b80f6ca
RH
688{
689 /* In case the MD file explicitly references the frame pointer, have
690 all such references point to the same frame pointer. This is
691 used during frame pointer elimination to distinguish the explicit
692 references to these registers from pseudos that happened to be
693 assigned to them.
694
695 If we have eliminated the frame pointer or arg pointer, we will
696 be using it as a normal register, for example as a spill
697 register. In such cases, we might be accessing it in a mode that
698 is not Pmode and therefore cannot use the pre-allocated rtx.
699
700 Also don't do this when we are making new REGs in reload, since
701 we don't want to get confused with the real pointers. */
702
55a2c322 703 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
3b80f6ca 704 {
e10c79fe
LB
705 if (regno == FRAME_POINTER_REGNUM
706 && (!reload_completed || frame_pointer_needed))
3b80f6ca 707 return frame_pointer_rtx;
c3e08036
TS
708
709 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
710 && regno == HARD_FRAME_POINTER_REGNUM
e10c79fe 711 && (!reload_completed || frame_pointer_needed))
3b80f6ca 712 return hard_frame_pointer_rtx;
3f393fc6
TS
713#if !HARD_FRAME_POINTER_IS_ARG_POINTER
714 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
715 && regno == ARG_POINTER_REGNUM)
3b80f6ca
RH
716 return arg_pointer_rtx;
717#endif
718#ifdef RETURN_ADDRESS_POINTER_REGNUM
bcb33994 719 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3b80f6ca
RH
720 return return_address_pointer_rtx;
721#endif
fc555370 722 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
bf9412cd 723 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2d67bd7b 724 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
68252e27 725 return pic_offset_table_rtx;
bcb33994 726 if (regno == STACK_POINTER_REGNUM)
3b80f6ca
RH
727 return stack_pointer_rtx;
728 }
729
006a94b0 730#if 0
6cde4876 731 /* If the per-function register table has been set up, try to re-use
006a94b0
JL
732 an existing entry in that table to avoid useless generation of RTL.
733
734 This code is disabled for now until we can fix the various backends
735 which depend on having non-shared hard registers in some cases. Long
736 term we want to re-enable this code as it can significantly cut down
e10c79fe
LB
737 on the amount of useless RTL that gets generated.
738
739 We'll also need to fix some code that runs after reload that wants to
740 set ORIGINAL_REGNO. */
741
6cde4876
JL
742 if (cfun
743 && cfun->emit
744 && regno_reg_rtx
745 && regno < FIRST_PSEUDO_REGISTER
746 && reg_raw_mode[regno] == mode)
747 return regno_reg_rtx[regno];
006a94b0 748#endif
6cde4876 749
08394eef 750 return gen_raw_REG (mode, regno);
3b80f6ca
RH
751}
752
41472af8 753rtx
ef4bddc2 754gen_rtx_MEM (machine_mode mode, rtx addr)
41472af8
MM
755{
756 rtx rt = gen_rtx_raw_MEM (mode, addr);
757
758 /* This field is not cleared by the mere allocation of the rtx, so
759 we clear it here. */
173b24b9 760 MEM_ATTRS (rt) = 0;
41472af8
MM
761
762 return rt;
763}
ddef6bc7 764
542a8afa
RH
765/* Generate a memory referring to non-trapping constant memory. */
766
767rtx
ef4bddc2 768gen_const_mem (machine_mode mode, rtx addr)
542a8afa
RH
769{
770 rtx mem = gen_rtx_MEM (mode, addr);
771 MEM_READONLY_P (mem) = 1;
772 MEM_NOTRAP_P (mem) = 1;
773 return mem;
774}
775
bf877a76
R
776/* Generate a MEM referring to fixed portions of the frame, e.g., register
777 save areas. */
778
779rtx
ef4bddc2 780gen_frame_mem (machine_mode mode, rtx addr)
bf877a76
R
781{
782 rtx mem = gen_rtx_MEM (mode, addr);
783 MEM_NOTRAP_P (mem) = 1;
784 set_mem_alias_set (mem, get_frame_alias_set ());
785 return mem;
786}
787
788/* Generate a MEM referring to a temporary use of the stack, not part
789 of the fixed stack frame. For example, something which is pushed
790 by a target splitter. */
791rtx
ef4bddc2 792gen_tmp_stack_mem (machine_mode mode, rtx addr)
bf877a76
R
793{
794 rtx mem = gen_rtx_MEM (mode, addr);
795 MEM_NOTRAP_P (mem) = 1;
e3b5732b 796 if (!cfun->calls_alloca)
bf877a76
R
797 set_mem_alias_set (mem, get_frame_alias_set ());
798 return mem;
799}
800
beb72684
RH
801/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
802 this construct would be valid, and false otherwise. */
803
804bool
ef4bddc2 805validate_subreg (machine_mode omode, machine_mode imode,
ed7a4b4b 806 const_rtx reg, unsigned int offset)
ddef6bc7 807{
beb72684
RH
808 unsigned int isize = GET_MODE_SIZE (imode);
809 unsigned int osize = GET_MODE_SIZE (omode);
810
811 /* All subregs must be aligned. */
812 if (offset % osize != 0)
813 return false;
814
815 /* The subreg offset cannot be outside the inner object. */
816 if (offset >= isize)
817 return false;
818
819 /* ??? This should not be here. Temporarily continue to allow word_mode
820 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
821 Generally, backends are doing something sketchy but it'll take time to
822 fix them all. */
823 if (omode == word_mode)
824 ;
825 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
826 is the culprit here, and not the backends. */
827 else if (osize >= UNITS_PER_WORD && isize >= osize)
828 ;
829 /* Allow component subregs of complex and vector. Though given the below
830 extraction rules, it's not always clear what that means. */
831 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
832 && GET_MODE_INNER (imode) == omode)
833 ;
834 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
835 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
836 represent this. It's questionable if this ought to be represented at
837 all -- why can't this all be hidden in post-reload splitters that make
838 arbitrarily mode changes to the registers themselves. */
839 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
840 ;
841 /* Subregs involving floating point modes are not allowed to
842 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
843 (subreg:SI (reg:DF) 0) isn't. */
844 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
845 {
55a2c322
VM
846 if (! (isize == osize
847 /* LRA can use subreg to store a floating point value in
848 an integer mode. Although the floating point and the
849 integer modes need the same number of hard registers,
850 the size of floating point mode can be less than the
851 integer mode. LRA also uses subregs for a register
852 should be used in different mode in on insn. */
853 || lra_in_progress))
beb72684
RH
854 return false;
855 }
ddef6bc7 856
beb72684
RH
857 /* Paradoxical subregs must have offset zero. */
858 if (osize > isize)
859 return offset == 0;
860
861 /* This is a normal subreg. Verify that the offset is representable. */
862
863 /* For hard registers, we already have most of these rules collected in
864 subreg_offset_representable_p. */
865 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
866 {
867 unsigned int regno = REGNO (reg);
868
beb72684
RH
869 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
870 && GET_MODE_INNER (imode) == omode)
871 ;
0d803030 872 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
beb72684 873 return false;
beb72684
RH
874
875 return subreg_offset_representable_p (regno, imode, offset, omode);
876 }
877
878 /* For pseudo registers, we want most of the same checks. Namely:
879 If the register no larger than a word, the subreg must be lowpart.
880 If the register is larger than a word, the subreg must be the lowpart
881 of a subword. A subreg does *not* perform arbitrary bit extraction.
882 Given that we've already checked mode/offset alignment, we only have
883 to check subword subregs here. */
55a2c322
VM
884 if (osize < UNITS_PER_WORD
885 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
beb72684 886 {
ef4bddc2 887 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
beb72684
RH
888 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
889 if (offset % UNITS_PER_WORD != low_off)
890 return false;
891 }
892 return true;
893}
894
895rtx
ef4bddc2 896gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
beb72684
RH
897{
898 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 899 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
900}
901
173b24b9
RK
902/* Generate a SUBREG representing the least-significant part of REG if MODE
903 is smaller than mode of REG, otherwise paradoxical SUBREG. */
904
ddef6bc7 905rtx
ef4bddc2 906gen_lowpart_SUBREG (machine_mode mode, rtx reg)
ddef6bc7 907{
ef4bddc2 908 machine_mode inmode;
ddef6bc7
JJ
909
910 inmode = GET_MODE (reg);
911 if (inmode == VOIDmode)
912 inmode = mode;
e0e08ac2
JH
913 return gen_rtx_SUBREG (mode, reg,
914 subreg_lowpart_offset (mode, inmode));
ddef6bc7 915}
fcc74520
RS
916
917rtx
ef4bddc2 918gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
fcc74520
RS
919 enum var_init_status status)
920{
921 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
922 PAT_VAR_LOCATION_STATUS (x) = status;
923 return x;
924}
c5c76735 925\f
23b2ce53 926
80379f51
PB
927/* Create an rtvec and stores within it the RTXen passed in the arguments. */
928
23b2ce53 929rtvec
e34d07f2 930gen_rtvec (int n, ...)
23b2ce53 931{
80379f51
PB
932 int i;
933 rtvec rt_val;
e34d07f2 934 va_list p;
23b2ce53 935
e34d07f2 936 va_start (p, n);
23b2ce53 937
80379f51 938 /* Don't allocate an empty rtvec... */
23b2ce53 939 if (n == 0)
0edf1bb2
JL
940 {
941 va_end (p);
942 return NULL_RTVEC;
943 }
23b2ce53 944
80379f51 945 rt_val = rtvec_alloc (n);
4f90e4a0 946
23b2ce53 947 for (i = 0; i < n; i++)
80379f51 948 rt_val->elem[i] = va_arg (p, rtx);
6268b922 949
e34d07f2 950 va_end (p);
80379f51 951 return rt_val;
23b2ce53
RS
952}
953
954rtvec
502b8322 955gen_rtvec_v (int n, rtx *argp)
23b2ce53 956{
b3694847
SS
957 int i;
958 rtvec rt_val;
23b2ce53 959
80379f51 960 /* Don't allocate an empty rtvec... */
23b2ce53 961 if (n == 0)
80379f51 962 return NULL_RTVEC;
23b2ce53 963
80379f51 964 rt_val = rtvec_alloc (n);
23b2ce53
RS
965
966 for (i = 0; i < n; i++)
8f985ec4 967 rt_val->elem[i] = *argp++;
23b2ce53
RS
968
969 return rt_val;
970}
e6eda746
DM
971
972rtvec
973gen_rtvec_v (int n, rtx_insn **argp)
974{
975 int i;
976 rtvec rt_val;
977
978 /* Don't allocate an empty rtvec... */
979 if (n == 0)
980 return NULL_RTVEC;
981
982 rt_val = rtvec_alloc (n);
983
984 for (i = 0; i < n; i++)
985 rt_val->elem[i] = *argp++;
986
987 return rt_val;
988}
989
23b2ce53 990\f
38ae7651
RS
991/* Return the number of bytes between the start of an OUTER_MODE
992 in-memory value and the start of an INNER_MODE in-memory value,
993 given that the former is a lowpart of the latter. It may be a
994 paradoxical lowpart, in which case the offset will be negative
995 on big-endian targets. */
996
997int
ef4bddc2
RS
998byte_lowpart_offset (machine_mode outer_mode,
999 machine_mode inner_mode)
38ae7651 1000{
03a95621 1001 if (paradoxical_subreg_p (outer_mode, inner_mode))
38ae7651 1002 return -subreg_lowpart_offset (inner_mode, outer_mode);
03a95621
RS
1003 else
1004 return subreg_lowpart_offset (outer_mode, inner_mode);
38ae7651 1005}
3d09ba95
RS
1006
1007/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1008 from address X. For paradoxical big-endian subregs this is a
1009 negative value, otherwise it's the same as OFFSET. */
1010
1011int
1012subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1013 unsigned int offset)
1014{
1015 if (paradoxical_subreg_p (outer_mode, inner_mode))
1016 {
1017 gcc_assert (offset == 0);
1018 return -subreg_lowpart_offset (inner_mode, outer_mode);
1019 }
1020 return offset;
1021}
1022
1023/* As above, but return the offset that existing subreg X would have
1024 if SUBREG_REG (X) were stored in memory. The only significant thing
1025 about the current SUBREG_REG is its mode. */
1026
1027int
1028subreg_memory_offset (const_rtx x)
1029{
1030 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1031 SUBREG_BYTE (x));
1032}
38ae7651 1033\f
23b2ce53
RS
1034/* Generate a REG rtx for a new pseudo register of mode MODE.
1035 This pseudo is assigned the next sequential register number. */
1036
1037rtx
ef4bddc2 1038gen_reg_rtx (machine_mode mode)
23b2ce53 1039{
b3694847 1040 rtx val;
2e3f842f 1041 unsigned int align = GET_MODE_ALIGNMENT (mode);
23b2ce53 1042
f8335a4f 1043 gcc_assert (can_create_pseudo_p ());
23b2ce53 1044
2e3f842f
L
1045 /* If a virtual register with bigger mode alignment is generated,
1046 increase stack alignment estimation because it might be spilled
1047 to stack later. */
b8698a0f 1048 if (SUPPORTS_STACK_ALIGNMENT
2e3f842f
L
1049 && crtl->stack_alignment_estimated < align
1050 && !crtl->stack_realign_processed)
ae58e548
JJ
1051 {
1052 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1053 if (crtl->stack_alignment_estimated < min_align)
1054 crtl->stack_alignment_estimated = min_align;
1055 }
2e3f842f 1056
1b3d8f8a
GK
1057 if (generating_concat_p
1058 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1059 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
fc84e8a8
RS
1060 {
1061 /* For complex modes, don't make a single pseudo.
1062 Instead, make a CONCAT of two pseudos.
1063 This allows noncontiguous allocation of the real and imaginary parts,
1064 which makes much better code. Besides, allocating DCmode
1065 pseudos overstrains reload on some machines like the 386. */
1066 rtx realpart, imagpart;
ef4bddc2 1067 machine_mode partmode = GET_MODE_INNER (mode);
fc84e8a8
RS
1068
1069 realpart = gen_reg_rtx (partmode);
1070 imagpart = gen_reg_rtx (partmode);
3b80f6ca 1071 return gen_rtx_CONCAT (mode, realpart, imagpart);
fc84e8a8
RS
1072 }
1073
004a7e45
UB
1074 /* Do not call gen_reg_rtx with uninitialized crtl. */
1075 gcc_assert (crtl->emit.regno_pointer_align_length);
1076
f44986d7
DM
1077 crtl->emit.ensure_regno_capacity ();
1078 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
23b2ce53 1079
f44986d7
DM
1080 val = gen_raw_REG (mode, reg_rtx_no);
1081 regno_reg_rtx[reg_rtx_no++] = val;
1082 return val;
1083}
0d4903b8 1084
f44986d7
DM
1085/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
1086 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
49ad7cfa 1087
f44986d7
DM
1088void
1089emit_status::ensure_regno_capacity ()
1090{
1091 int old_size = regno_pointer_align_length;
23b2ce53 1092
f44986d7
DM
1093 if (reg_rtx_no < old_size)
1094 return;
23b2ce53 1095
f44986d7
DM
1096 int new_size = old_size * 2;
1097 while (reg_rtx_no >= new_size)
1098 new_size *= 2;
1099
1100 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1101 memset (tmp + old_size, 0, new_size - old_size);
1102 regno_pointer_align = (unsigned char *) tmp;
1103
1104 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1105 memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1106 regno_reg_rtx = new1;
1107
1108 crtl->emit.regno_pointer_align_length = new_size;
23b2ce53
RS
1109}
1110
a698cc03
JL
1111/* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1112
1113bool
1114reg_is_parm_p (rtx reg)
1115{
1116 tree decl;
1117
1118 gcc_assert (REG_P (reg));
1119 decl = REG_EXPR (reg);
1120 return (decl && TREE_CODE (decl) == PARM_DECL);
1121}
1122
38ae7651
RS
1123/* Update NEW with the same attributes as REG, but with OFFSET added
1124 to the REG_OFFSET. */
a560d4d4 1125
e53a16e7 1126static void
60564289 1127update_reg_offset (rtx new_rtx, rtx reg, int offset)
a560d4d4 1128{
60564289 1129 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
502b8322 1130 REG_OFFSET (reg) + offset);
e53a16e7
ILT
1131}
1132
38ae7651
RS
1133/* Generate a register with same attributes as REG, but with OFFSET
1134 added to the REG_OFFSET. */
e53a16e7
ILT
1135
1136rtx
ef4bddc2 1137gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
e53a16e7
ILT
1138 int offset)
1139{
60564289 1140 rtx new_rtx = gen_rtx_REG (mode, regno);
e53a16e7 1141
60564289
KG
1142 update_reg_offset (new_rtx, reg, offset);
1143 return new_rtx;
e53a16e7
ILT
1144}
1145
1146/* Generate a new pseudo-register with the same attributes as REG, but
38ae7651 1147 with OFFSET added to the REG_OFFSET. */
e53a16e7
ILT
1148
1149rtx
ef4bddc2 1150gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
e53a16e7 1151{
60564289 1152 rtx new_rtx = gen_reg_rtx (mode);
e53a16e7 1153
60564289
KG
1154 update_reg_offset (new_rtx, reg, offset);
1155 return new_rtx;
a560d4d4
JH
1156}
1157
38ae7651
RS
1158/* Adjust REG in-place so that it has mode MODE. It is assumed that the
1159 new register is a (possibly paradoxical) lowpart of the old one. */
a560d4d4
JH
1160
1161void
ef4bddc2 1162adjust_reg_mode (rtx reg, machine_mode mode)
a560d4d4 1163{
38ae7651
RS
1164 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1165 PUT_MODE (reg, mode);
1166}
1167
1168/* Copy REG's attributes from X, if X has any attributes. If REG and X
1169 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1170
1171void
1172set_reg_attrs_from_value (rtx reg, rtx x)
1173{
1174 int offset;
de6f3f7a
L
1175 bool can_be_reg_pointer = true;
1176
1177 /* Don't call mark_reg_pointer for incompatible pointer sign
1178 extension. */
1179 while (GET_CODE (x) == SIGN_EXTEND
1180 || GET_CODE (x) == ZERO_EXTEND
1181 || GET_CODE (x) == TRUNCATE
1182 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1183 {
2a870875
RS
1184#if defined(POINTERS_EXTEND_UNSIGNED)
1185 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
8d8e740c
BE
1186 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1187 || (paradoxical_subreg_p (x)
1188 && ! (SUBREG_PROMOTED_VAR_P (x)
1189 && SUBREG_CHECK_PROMOTED_SIGN (x,
1190 POINTERS_EXTEND_UNSIGNED))))
2a870875 1191 && !targetm.have_ptr_extend ())
de6f3f7a
L
1192 can_be_reg_pointer = false;
1193#endif
1194 x = XEXP (x, 0);
1195 }
38ae7651 1196
923ba36f
JJ
1197 /* Hard registers can be reused for multiple purposes within the same
1198 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1199 on them is wrong. */
1200 if (HARD_REGISTER_P (reg))
1201 return;
1202
38ae7651 1203 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
46b71b03
PB
1204 if (MEM_P (x))
1205 {
527210c4
RS
1206 if (MEM_OFFSET_KNOWN_P (x))
1207 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1208 MEM_OFFSET (x) + offset);
de6f3f7a 1209 if (can_be_reg_pointer && MEM_POINTER (x))
0a317111 1210 mark_reg_pointer (reg, 0);
46b71b03
PB
1211 }
1212 else if (REG_P (x))
1213 {
1214 if (REG_ATTRS (x))
1215 update_reg_offset (reg, x, offset);
de6f3f7a 1216 if (can_be_reg_pointer && REG_POINTER (x))
46b71b03
PB
1217 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1218 }
1219}
1220
1221/* Generate a REG rtx for a new pseudo register, copying the mode
1222 and attributes from X. */
1223
1224rtx
1225gen_reg_rtx_and_attrs (rtx x)
1226{
1227 rtx reg = gen_reg_rtx (GET_MODE (x));
1228 set_reg_attrs_from_value (reg, x);
1229 return reg;
a560d4d4
JH
1230}
1231
9d18e06b
JZ
1232/* Set the register attributes for registers contained in PARM_RTX.
1233 Use needed values from memory attributes of MEM. */
1234
1235void
502b8322 1236set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
9d18e06b 1237{
f8cfc6aa 1238 if (REG_P (parm_rtx))
38ae7651 1239 set_reg_attrs_from_value (parm_rtx, mem);
9d18e06b
JZ
1240 else if (GET_CODE (parm_rtx) == PARALLEL)
1241 {
1242 /* Check for a NULL entry in the first slot, used to indicate that the
1243 parameter goes both on the stack and in registers. */
1244 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1245 for (; i < XVECLEN (parm_rtx, 0); i++)
1246 {
1247 rtx x = XVECEXP (parm_rtx, 0, i);
f8cfc6aa 1248 if (REG_P (XEXP (x, 0)))
9d18e06b
JZ
1249 REG_ATTRS (XEXP (x, 0))
1250 = get_reg_attrs (MEM_EXPR (mem),
1251 INTVAL (XEXP (x, 1)));
1252 }
1253 }
1254}
1255
38ae7651
RS
1256/* Set the REG_ATTRS for registers in value X, given that X represents
1257 decl T. */
a560d4d4 1258
4e3825db 1259void
38ae7651
RS
1260set_reg_attrs_for_decl_rtl (tree t, rtx x)
1261{
1f9ceff1
AO
1262 if (!t)
1263 return;
1264 tree tdecl = t;
38ae7651 1265 if (GET_CODE (x) == SUBREG)
fbe6ec81 1266 {
38ae7651
RS
1267 gcc_assert (subreg_lowpart_p (x));
1268 x = SUBREG_REG (x);
fbe6ec81 1269 }
f8cfc6aa 1270 if (REG_P (x))
38ae7651
RS
1271 REG_ATTRS (x)
1272 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1f9ceff1
AO
1273 DECL_P (tdecl)
1274 ? DECL_MODE (tdecl)
1275 : TYPE_MODE (TREE_TYPE (tdecl))));
a560d4d4
JH
1276 if (GET_CODE (x) == CONCAT)
1277 {
1278 if (REG_P (XEXP (x, 0)))
1279 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1280 if (REG_P (XEXP (x, 1)))
1281 REG_ATTRS (XEXP (x, 1))
1282 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1283 }
1284 if (GET_CODE (x) == PARALLEL)
1285 {
d4afac5b
JZ
1286 int i, start;
1287
1288 /* Check for a NULL entry, used to indicate that the parameter goes
1289 both on the stack and in registers. */
1290 if (XEXP (XVECEXP (x, 0, 0), 0))
1291 start = 0;
1292 else
1293 start = 1;
1294
1295 for (i = start; i < XVECLEN (x, 0); i++)
a560d4d4
JH
1296 {
1297 rtx y = XVECEXP (x, 0, i);
1298 if (REG_P (XEXP (y, 0)))
1299 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1300 }
1301 }
1302}
1303
38ae7651
RS
1304/* Assign the RTX X to declaration T. */
1305
1306void
1307set_decl_rtl (tree t, rtx x)
1308{
1309 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1310 if (x)
1311 set_reg_attrs_for_decl_rtl (t, x);
1312}
1313
5141868d
RS
1314/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1315 if the ABI requires the parameter to be passed by reference. */
38ae7651
RS
1316
1317void
5141868d 1318set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
38ae7651
RS
1319{
1320 DECL_INCOMING_RTL (t) = x;
5141868d 1321 if (x && !by_reference_p)
38ae7651
RS
1322 set_reg_attrs_for_decl_rtl (t, x);
1323}
1324
754fdcca
RK
1325/* Identify REG (which may be a CONCAT) as a user register. */
1326
1327void
502b8322 1328mark_user_reg (rtx reg)
754fdcca
RK
1329{
1330 if (GET_CODE (reg) == CONCAT)
1331 {
1332 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1333 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1334 }
754fdcca 1335 else
5b0264cb
NS
1336 {
1337 gcc_assert (REG_P (reg));
1338 REG_USERVAR_P (reg) = 1;
1339 }
754fdcca
RK
1340}
1341
86fe05e0
RK
1342/* Identify REG as a probable pointer register and show its alignment
1343 as ALIGN, if nonzero. */
23b2ce53
RS
1344
1345void
502b8322 1346mark_reg_pointer (rtx reg, int align)
23b2ce53 1347{
3502dc9c 1348 if (! REG_POINTER (reg))
00995e78 1349 {
3502dc9c 1350 REG_POINTER (reg) = 1;
86fe05e0 1351
00995e78
RE
1352 if (align)
1353 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1354 }
1355 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 1356 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 1357 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
1358}
1359
1360/* Return 1 plus largest pseudo reg number used in the current function. */
1361
1362int
502b8322 1363max_reg_num (void)
23b2ce53
RS
1364{
1365 return reg_rtx_no;
1366}
1367
1368/* Return 1 + the largest label number used so far in the current function. */
1369
1370int
502b8322 1371max_label_num (void)
23b2ce53 1372{
23b2ce53
RS
1373 return label_num;
1374}
1375
1376/* Return first label number used in this function (if any were used). */
1377
1378int
502b8322 1379get_first_label_num (void)
23b2ce53
RS
1380{
1381 return first_label_num;
1382}
6de9cd9a
DN
1383
1384/* If the rtx for label was created during the expansion of a nested
1385 function, then first_label_num won't include this label number.
fa10beec 1386 Fix this now so that array indices work later. */
6de9cd9a
DN
1387
1388void
9aa50db7 1389maybe_set_first_label_num (rtx_code_label *x)
6de9cd9a
DN
1390{
1391 if (CODE_LABEL_NUMBER (x) < first_label_num)
1392 first_label_num = CODE_LABEL_NUMBER (x);
1393}
51b86113
DM
1394
1395/* For use by the RTL function loader, when mingling with normal
1396 functions.
1397 Ensure that label_num is greater than the label num of X, to avoid
1398 duplicate labels in the generated assembler. */
1399
1400void
1401maybe_set_max_label_num (rtx_code_label *x)
1402{
1403 if (CODE_LABEL_NUMBER (x) >= label_num)
1404 label_num = CODE_LABEL_NUMBER (x) + 1;
1405}
1406
23b2ce53
RS
1407\f
1408/* Return a value representing some low-order bits of X, where the number
1409 of low-order bits is given by MODE. Note that no conversion is done
750c9258 1410 between floating-point and fixed-point values, rather, the bit
23b2ce53
RS
1411 representation is returned.
1412
1413 This function handles the cases in common between gen_lowpart, below,
1414 and two variants in cse.c and combine.c. These are the cases that can
1415 be safely handled at all points in the compilation.
1416
1417 If this is not a case we can handle, return 0. */
1418
1419rtx
ef4bddc2 1420gen_lowpart_common (machine_mode mode, rtx x)
23b2ce53 1421{
ddef6bc7 1422 int msize = GET_MODE_SIZE (mode);
550d1387 1423 int xsize;
ef4bddc2 1424 machine_mode innermode;
550d1387
GK
1425
1426 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1427 so we have to make one up. Yuk. */
1428 innermode = GET_MODE (x);
481683e1 1429 if (CONST_INT_P (x)
db487452 1430 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
f4b31647 1431 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
550d1387 1432 else if (innermode == VOIDmode)
f4b31647 1433 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
b8698a0f 1434
550d1387
GK
1435 xsize = GET_MODE_SIZE (innermode);
1436
5b0264cb 1437 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
23b2ce53 1438
550d1387 1439 if (innermode == mode)
23b2ce53
RS
1440 return x;
1441
1442 /* MODE must occupy no more words than the mode of X. */
550d1387
GK
1443 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1444 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
23b2ce53
RS
1445 return 0;
1446
53501a19 1447 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
3d8bf70f 1448 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
53501a19
BS
1449 return 0;
1450
54651377 1451 scalar_int_mode int_mode, int_innermode, from_mode;
23b2ce53 1452 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
54651377
RS
1453 && is_a <scalar_int_mode> (mode, &int_mode)
1454 && is_a <scalar_int_mode> (innermode, &int_innermode)
1455 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
23b2ce53
RS
1456 {
1457 /* If we are getting the low-order part of something that has been
1458 sign- or zero-extended, we can either just use the object being
1459 extended or make a narrower extension. If we want an even smaller
1460 piece than the size of the object being extended, call ourselves
1461 recursively.
1462
1463 This case is used mostly by combine and cse. */
1464
54651377 1465 if (from_mode == int_mode)
23b2ce53 1466 return XEXP (x, 0);
54651377
RS
1467 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1468 return gen_lowpart_common (int_mode, XEXP (x, 0));
1469 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1470 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
23b2ce53 1471 }
f8cfc6aa 1472 else if (GET_CODE (x) == SUBREG || REG_P (x)
550d1387 1473 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
33ffb5c5 1474 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
3403a1a9 1475 return lowpart_subreg (mode, x, innermode);
8aada4ad 1476
23b2ce53
RS
1477 /* Otherwise, we can't do this. */
1478 return 0;
1479}
1480\f
ccba022b 1481rtx
ef4bddc2 1482gen_highpart (machine_mode mode, rtx x)
ccba022b 1483{
ddef6bc7 1484 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1485 rtx result;
ddef6bc7 1486
ccba022b
RS
1487 /* This case loses if X is a subreg. To catch bugs early,
1488 complain if an invalid MODE is used even in other cases. */
5b0264cb
NS
1489 gcc_assert (msize <= UNITS_PER_WORD
1490 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
ddef6bc7 1491
e0e08ac2
JH
1492 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1493 subreg_highpart_offset (mode, GET_MODE (x)));
5b0264cb 1494 gcc_assert (result);
b8698a0f 1495
09482e0d
JW
1496 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1497 the target if we have a MEM. gen_highpart must return a valid operand,
1498 emitting code if necessary to do so. */
5b0264cb
NS
1499 if (MEM_P (result))
1500 {
1501 result = validize_mem (result);
1502 gcc_assert (result);
1503 }
b8698a0f 1504
e0e08ac2
JH
1505 return result;
1506}
5222e470 1507
26d249eb 1508/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1509 be VOIDmode constant. */
1510rtx
ef4bddc2 1511gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
5222e470
JH
1512{
1513 if (GET_MODE (exp) != VOIDmode)
1514 {
5b0264cb 1515 gcc_assert (GET_MODE (exp) == innermode);
5222e470
JH
1516 return gen_highpart (outermode, exp);
1517 }
1518 return simplify_gen_subreg (outermode, exp, innermode,
1519 subreg_highpart_offset (outermode, innermode));
1520}
68252e27 1521
33951763
RS
1522/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1523 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
8698cce3 1524
e0e08ac2 1525unsigned int
33951763 1526subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
e0e08ac2 1527{
33951763
RS
1528 if (outer_bytes > inner_bytes)
1529 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1530 return 0;
ddef6bc7 1531
33951763
RS
1532 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1533 return inner_bytes - outer_bytes;
1534 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1535 return 0;
1536 else
1537 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
ccba022b 1538}
eea50aa0 1539
33951763
RS
1540/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1541 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1542
e0e08ac2 1543unsigned int
33951763
RS
1544subreg_size_highpart_offset (unsigned int outer_bytes,
1545 unsigned int inner_bytes)
eea50aa0 1546{
33951763 1547 gcc_assert (inner_bytes >= outer_bytes);
eea50aa0 1548
33951763
RS
1549 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1550 return 0;
1551 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1552 return inner_bytes - outer_bytes;
1553 else
1554 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1555 (inner_bytes - outer_bytes)
1556 * BITS_PER_UNIT);
eea50aa0 1557}
ccba022b 1558
23b2ce53
RS
1559/* Return 1 iff X, assumed to be a SUBREG,
1560 refers to the least significant part of its containing reg.
1561 If X is not a SUBREG, always return 1 (it is its own low part!). */
1562
1563int
fa233e34 1564subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1565{
1566 if (GET_CODE (x) != SUBREG)
1567 return 1;
a3a03040
RK
1568 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1569 return 0;
23b2ce53 1570
e0e08ac2
JH
1571 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1572 == SUBREG_BYTE (x));
23b2ce53
RS
1573}
1574\f
ddef6bc7
JJ
1575/* Return subword OFFSET of operand OP.
1576 The word number, OFFSET, is interpreted as the word number starting
1577 at the low-order address. OFFSET 0 is the low-order word if not
1578 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1579
1580 If we cannot extract the required word, we return zero. Otherwise,
1581 an rtx corresponding to the requested word will be returned.
1582
1583 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1584 reload has completed, a valid address will always be returned. After
1585 reload, if a valid address cannot be returned, we return zero.
1586
1587 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1588 it is the responsibility of the caller.
1589
1590 MODE is the mode of OP in case it is a CONST_INT.
1591
1592 ??? This is still rather broken for some cases. The problem for the
1593 moment is that all callers of this thing provide no 'goal mode' to
1594 tell us to work with. This exists because all callers were written
0631e0bf
JH
1595 in a word based SUBREG world.
1596 Now use of this function can be deprecated by simplify_subreg in most
1597 cases.
1598 */
ddef6bc7
JJ
1599
1600rtx
ef4bddc2 1601operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
ddef6bc7
JJ
1602{
1603 if (mode == VOIDmode)
1604 mode = GET_MODE (op);
1605
5b0264cb 1606 gcc_assert (mode != VOIDmode);
ddef6bc7 1607
30f7a378 1608 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1609 if (mode != BLKmode
1610 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1611 return 0;
1612
30f7a378 1613 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1614 if (mode != BLKmode
1615 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1616 return const0_rtx;
1617
ddef6bc7 1618 /* Form a new MEM at the requested address. */
3c0cb5de 1619 if (MEM_P (op))
ddef6bc7 1620 {
60564289 1621 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1622
f1ec5147 1623 if (! validate_address)
60564289 1624 return new_rtx;
f1ec5147
RK
1625
1626 else if (reload_completed)
ddef6bc7 1627 {
09e881c9
BE
1628 if (! strict_memory_address_addr_space_p (word_mode,
1629 XEXP (new_rtx, 0),
1630 MEM_ADDR_SPACE (op)))
f1ec5147 1631 return 0;
ddef6bc7 1632 }
f1ec5147 1633 else
60564289 1634 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
ddef6bc7
JJ
1635 }
1636
0631e0bf
JH
 1637 /* The rest can be handled by simplify_subreg. */
1638 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1639}
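/* Illustrative sketch, not part of emit-rtl.c: the classic word-based
   caller pattern.  example_move_low_words is hypothetical; word 0 is
   the low-order word unless WORDS_BIG_ENDIAN.  */
#if 0
static void
example_move_low_words (rtx dest, rtx src)
{
  rtx d0 = operand_subword (dest, 0, 1, DImode);
  rtx s0 = operand_subword (src, 0, 1, DImode);
  if (d0 != 0 && s0 != 0)
    emit_move_insn (d0, s0);
}
#endif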
1640
535a42b1
NS
1641/* Similar to `operand_subword', but never return 0. If we can't
1642 extract the required subword, put OP into a register and try again.
1643 The second attempt must succeed. We always validate the address in
1644 this case.
23b2ce53
RS
1645
1646 MODE is the mode of OP, in case it is CONST_INT. */
1647
1648rtx
ef4bddc2 1649operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
23b2ce53 1650{
ddef6bc7 1651 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1652
1653 if (result)
1654 return result;
1655
1656 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1657 {
 1658 /* If this is a register that cannot be accessed by words, copy it
1659 to a pseudo register. */
f8cfc6aa 1660 if (REG_P (op))
77e6b0eb
JC
1661 op = copy_to_reg (op);
1662 else
1663 op = force_reg (mode, op);
1664 }
23b2ce53 1665
ddef6bc7 1666 result = operand_subword (op, offset, 1, mode);
5b0264cb 1667 gcc_assert (result);
23b2ce53
RS
1668
1669 return result;
1670}
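/* Illustrative sketch (hypothetical helper): the _force variant never
   fails for the source operand, because it may copy SRC into a
   register first; the destination word must still be checked.  */
#if 0
static void
example_move_high_words (rtx dest, rtx src)
{
  rtx d1 = operand_subword (dest, 1, 1, DImode);
  if (d1 != 0)
    emit_move_insn (d1, operand_subword_force (src, 1, DImode));
}
#endif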
1671\f
2b3493c8
AK
1672/* Return 1 if the MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
 1673 and 0 otherwise. */
1674
1675int
4f588890 1676mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1677{
1678 if (expr1 == expr2)
1679 return 1;
1680
1681 if (! expr1 || ! expr2)
1682 return 0;
1683
1684 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1685 return 0;
1686
55b34b5f 1687 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1688}
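/* Illustrative sketch (hypothetical helper): decide whether two MEMs
   were derived from the same source-level expression.  */
#if 0
static bool
example_same_source_expr (rtx mem1, rtx mem2)
{
  return mem_expr_equal_p (MEM_EXPR (mem1), MEM_EXPR (mem2)) != 0;
}
#endif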
1689
805903b5
JJ
1690/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1691 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1692 -1 if not known. */
1693
1694int
d9223014 1695get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1696{
1697 tree expr;
1698 unsigned HOST_WIDE_INT offset;
1699
1700 /* This function can't use
527210c4 1701 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1702 || (MAX (MEM_ALIGN (mem),
0eb77834 1703 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1704 < align))
1705 return -1;
1706 else
527210c4 1707 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1708 for two reasons:
1709 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1710 for <variable>. get_inner_reference doesn't handle it and
1711 even if it did, the alignment in that case needs to be determined
1712 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1713 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1714 isn't sufficiently aligned, the object it is in might be. */
1715 gcc_assert (MEM_P (mem));
1716 expr = MEM_EXPR (mem);
527210c4 1717 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1718 return -1;
1719
527210c4 1720 offset = MEM_OFFSET (mem);
805903b5
JJ
1721 if (DECL_P (expr))
1722 {
1723 if (DECL_ALIGN (expr) < align)
1724 return -1;
1725 }
1726 else if (INDIRECT_REF_P (expr))
1727 {
1728 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1729 return -1;
1730 }
1731 else if (TREE_CODE (expr) == COMPONENT_REF)
1732 {
1733 while (1)
1734 {
1735 tree inner = TREE_OPERAND (expr, 0);
1736 tree field = TREE_OPERAND (expr, 1);
1737 tree byte_offset = component_ref_field_offset (expr);
1738 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1739
1740 if (!byte_offset
cc269bb6
RS
1741 || !tree_fits_uhwi_p (byte_offset)
1742 || !tree_fits_uhwi_p (bit_offset))
805903b5
JJ
1743 return -1;
1744
ae7e9ddd
RS
1745 offset += tree_to_uhwi (byte_offset);
1746 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
805903b5
JJ
1747
1748 if (inner == NULL_TREE)
1749 {
1750 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1751 < (unsigned int) align)
1752 return -1;
1753 break;
1754 }
1755 else if (DECL_P (inner))
1756 {
1757 if (DECL_ALIGN (inner) < align)
1758 return -1;
1759 break;
1760 }
1761 else if (TREE_CODE (inner) != COMPONENT_REF)
1762 return -1;
1763 expr = inner;
1764 }
1765 }
1766 else
1767 return -1;
1768
1769 return offset & ((align / BITS_PER_UNIT) - 1);
1770}
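/* Illustrative sketch (hypothetical helper): MEM starts exactly on a
   32-bit boundary only if the returned misalignment is 0; a return of
   -1 means the offset is not known at all.  */
#if 0
static bool
example_word_aligned_p (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}
#endif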
1771
6926c713 1772/* Given REF (a MEM) and T, either the type of REF or the expression
173b24b9 1773 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1774 if we are making a new object of this type. BITPOS is nonzero if
1775 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1776
1777void
502b8322
AJ
1778set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1779 HOST_WIDE_INT bitpos)
173b24b9 1780{
6f1087be 1781 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1782 tree type;
f12144dd 1783 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1784 addr_space_t as;
173b24b9
RK
1785
1786 /* It can happen that type_for_mode was given a mode for which there
1787 is no language-level type. In which case it returns NULL, which
1788 we can see here. */
1789 if (t == NULL_TREE)
1790 return;
1791
1792 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1793 if (type == error_mark_node)
1794 return;
173b24b9 1795
173b24b9
RK
1796 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1797 wrong answer, as it assumes that DECL_RTL already has the right alias
1798 info. Callers should not set DECL_RTL until after the call to
1799 set_mem_attributes. */
5b0264cb 1800 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1801
f12144dd
RS
1802 memset (&attrs, 0, sizeof (attrs));
1803
738cc472 1804 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1805 front-end routine) and use it. */
f12144dd 1806 attrs.alias = get_alias_set (t);
173b24b9 1807
a5e9c810 1808 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1809 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1810
268f7033 1811 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1812 refattrs = MEM_ATTRS (ref);
1813 if (refattrs)
268f7033
UW
1814 {
1815 /* ??? Can this ever happen? Calling this routine on a MEM that
1816 already carries memory attributes should probably be invalid. */
f12144dd 1817 attrs.expr = refattrs->expr;
754c3d5d 1818 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1819 attrs.offset = refattrs->offset;
754c3d5d 1820 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1821 attrs.size = refattrs->size;
1822 attrs.align = refattrs->align;
268f7033
UW
1823 }
1824
1825 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1826 else
268f7033 1827 {
f12144dd
RS
1828 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1829 gcc_assert (!defattrs->expr);
754c3d5d 1830 gcc_assert (!defattrs->offset_known_p);
f12144dd 1831
268f7033 1832 /* Respect mode size. */
754c3d5d 1833 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1834 attrs.size = defattrs->size;
268f7033
UW
1835 /* ??? Is this really necessary? We probably should always get
1836 the size from the type below. */
1837
1838 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1839 if T is an object, always compute the object alignment below. */
f12144dd
RS
1840 if (TYPE_P (t))
1841 attrs.align = defattrs->align;
1842 else
1843 attrs.align = BITS_PER_UNIT;
268f7033
UW
1844 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1845 e.g. if the type carries an alignment attribute. Should we be
1846 able to simply always use TYPE_ALIGN? */
1847 }
1848
25b75a48
BE
1849 /* We can set the alignment from the type if we are making an object or if
1850 this is an INDIRECT_REF. */
1851 if (objectp || TREE_CODE (t) == INDIRECT_REF)
f12144dd 1852 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1853
738cc472 1854 /* If the size is known, we can set that. */
a787ccc3 1855 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1856
30b0317c
RB
1857 /* The address-space is that of the type. */
1858 as = TYPE_ADDR_SPACE (type);
1859
80965c18
RK
1860 /* If T is not a type, we may be able to deduce some more information about
1861 the expression. */
1862 if (! TYPE_P (t))
8ac61af7 1863 {
8476af98 1864 tree base;
389fdba0 1865
8ac61af7
RK
1866 if (TREE_THIS_VOLATILE (t))
1867 MEM_VOLATILE_P (ref) = 1;
173b24b9 1868
c56e3582
RK
1869 /* Now remove any conversions: they don't change what the underlying
1870 object is. Likewise for SAVE_EXPR. */
1043771b 1871 while (CONVERT_EXPR_P (t)
c56e3582
RK
1872 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1873 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1874 t = TREE_OPERAND (t, 0);
1875
4994da65
RG
1876 /* Note whether this expression can trap. */
1877 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1878
1879 base = get_base_address (t);
f18a7b25
MJ
1880 if (base)
1881 {
1882 if (DECL_P (base)
1883 && TREE_READONLY (base)
1884 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1885 && !TREE_THIS_VOLATILE (base))
1886 MEM_READONLY_P (ref) = 1;
1887
1888 /* Mark static const strings readonly as well. */
1889 if (TREE_CODE (base) == STRING_CST
1890 && TREE_READONLY (base)
1891 && TREE_STATIC (base))
1892 MEM_READONLY_P (ref) = 1;
1893
30b0317c 1894 /* Address-space information is on the base object. */
f18a7b25
MJ
1895 if (TREE_CODE (base) == MEM_REF
1896 || TREE_CODE (base) == TARGET_MEM_REF)
1897 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1898 0))));
1899 else
1900 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1901 }
ba30e50d 1902
2039d7aa
RH
 1903 /* If this expression uses its parent's alias set, mark it such
1904 that we won't change it. */
b4ada065 1905 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
1906 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1907
8ac61af7
RK
1908 /* If this is a decl, set the attributes of the MEM from it. */
1909 if (DECL_P (t))
1910 {
f12144dd 1911 attrs.expr = t;
754c3d5d
RS
1912 attrs.offset_known_p = true;
1913 attrs.offset = 0;
6f1087be 1914 apply_bitpos = bitpos;
a787ccc3 1915 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
1916 }
1917
30b0317c 1918 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1919 else if (CONSTANT_CLASS_P (t))
30b0317c 1920 ;
998d7deb 1921
a787ccc3
RS
1922 /* If this is a field reference, record it. */
1923 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1924 {
f12144dd 1925 attrs.expr = t;
754c3d5d
RS
1926 attrs.offset_known_p = true;
1927 attrs.offset = 0;
6f1087be 1928 apply_bitpos = bitpos;
a787ccc3
RS
1929 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1930 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1931 }
1932
1933 /* If this is an array reference, look for an outer field reference. */
1934 else if (TREE_CODE (t) == ARRAY_REF)
1935 {
1936 tree off_tree = size_zero_node;
1b1838b6
JW
1937 /* We can't modify t, because we use it at the end of the
1938 function. */
1939 tree t2 = t;
998d7deb
RH
1940
1941 do
1942 {
1b1838b6 1943 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1944 tree low_bound = array_ref_low_bound (t2);
1945 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1946
1947 /* We assume all arrays have sizes that are a multiple of a byte.
1948 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1949 index, then convert to sizetype and multiply by the size of
1950 the array element. */
1951 if (! integer_zerop (low_bound))
4845b383
KH
1952 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1953 index, low_bound);
2567406a 1954
44de5aeb 1955 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1956 size_binop (MULT_EXPR,
1957 fold_convert (sizetype,
1958 index),
44de5aeb
RK
1959 unit_size),
1960 off_tree);
1b1838b6 1961 t2 = TREE_OPERAND (t2, 0);
998d7deb 1962 }
1b1838b6 1963 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1964
30b0317c 1965 if (DECL_P (t2)
12ead254
RB
1966 || (TREE_CODE (t2) == COMPONENT_REF
1967 /* For trailing arrays t2 doesn't have a size that
1968 covers all valid accesses. */
c3e46927 1969 && ! array_at_struct_end_p (t)))
998d7deb 1970 {
f12144dd 1971 attrs.expr = t2;
754c3d5d 1972 attrs.offset_known_p = false;
cc269bb6 1973 if (tree_fits_uhwi_p (off_tree))
6f1087be 1974 {
754c3d5d 1975 attrs.offset_known_p = true;
ae7e9ddd 1976 attrs.offset = tree_to_uhwi (off_tree);
6f1087be
RH
1977 apply_bitpos = bitpos;
1978 }
998d7deb 1979 }
30b0317c 1980 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
1981 }
1982
56c47f22 1983 /* If this is an indirect reference, record it. */
70f34814 1984 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1985 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1986 {
f12144dd 1987 attrs.expr = t;
754c3d5d
RS
1988 attrs.offset_known_p = true;
1989 attrs.offset = 0;
56c47f22
RG
1990 apply_bitpos = bitpos;
1991 }
1992
30b0317c
RB
1993 /* Compute the alignment. */
1994 unsigned int obj_align;
1995 unsigned HOST_WIDE_INT obj_bitpos;
1996 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1997 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1998 if (obj_bitpos != 0)
146ec50f 1999 obj_align = least_bit_hwi (obj_bitpos);
30b0317c 2000 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
2001 }
2002
cc269bb6 2003 if (tree_fits_uhwi_p (new_size))
a787ccc3
RS
2004 {
2005 attrs.size_known_p = true;
ae7e9ddd 2006 attrs.size = tree_to_uhwi (new_size);
a787ccc3
RS
2007 }
2008
15c812e3 2009 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
2010 bit position offset. Similarly, increase the size of the accessed
2011 object to contain the negative offset. */
6f1087be 2012 if (apply_bitpos)
8c317c5f 2013 {
754c3d5d
RS
2014 gcc_assert (attrs.offset_known_p);
2015 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
2016 if (attrs.size_known_p)
2017 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 2018 }
6f1087be 2019
8ac61af7 2020 /* Now set the attributes we computed above. */
f18a7b25 2021 attrs.addrspace = as;
f12144dd 2022 set_mem_attrs (ref, &attrs);
173b24b9
RK
2023}
2024
6f1087be 2025void
502b8322 2026set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
2027{
2028 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2029}
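/* Illustrative sketch (hypothetical helper, assuming the DECL_RTL of
   DECL is itself a MEM): build a fresh MEM for a decl and let
   set_mem_attributes fill in the alias set, alignment, size and
   MEM_EXPR.  */
#if 0
static rtx
example_mem_for_decl (tree decl)
{
  rtx addr = copy_to_reg (XEXP (DECL_RTL (decl), 0));
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)), addr);
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif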
2030
173b24b9
RK
2031/* Set the alias set of MEM to SET. */
2032
2033void
4862826d 2034set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 2035{
f12144dd
RS
2036 struct mem_attrs attrs;
2037
173b24b9 2038 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 2039 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
2040 attrs = *get_mem_attrs (mem);
2041 attrs.alias = set;
2042 set_mem_attrs (mem, &attrs);
09e881c9
BE
2043}
2044
2045/* Set the address space of MEM to ADDRSPACE (target-defined). */
2046
2047void
2048set_mem_addr_space (rtx mem, addr_space_t addrspace)
2049{
f12144dd
RS
2050 struct mem_attrs attrs;
2051
2052 attrs = *get_mem_attrs (mem);
2053 attrs.addrspace = addrspace;
2054 set_mem_attrs (mem, &attrs);
173b24b9 2055}
738cc472 2056
d022d93e 2057/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
2058
2059void
502b8322 2060set_mem_align (rtx mem, unsigned int align)
738cc472 2061{
f12144dd
RS
2062 struct mem_attrs attrs;
2063
2064 attrs = *get_mem_attrs (mem);
2065 attrs.align = align;
2066 set_mem_attrs (mem, &attrs);
738cc472 2067}
1285011e 2068
998d7deb 2069/* Set the expr for MEM to EXPR. */
1285011e
RK
2070
2071void
502b8322 2072set_mem_expr (rtx mem, tree expr)
1285011e 2073{
f12144dd
RS
2074 struct mem_attrs attrs;
2075
2076 attrs = *get_mem_attrs (mem);
2077 attrs.expr = expr;
2078 set_mem_attrs (mem, &attrs);
1285011e 2079}
998d7deb
RH
2080
2081/* Set the offset of MEM to OFFSET. */
2082
2083void
527210c4 2084set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 2085{
f12144dd
RS
2086 struct mem_attrs attrs;
2087
2088 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2089 attrs.offset_known_p = true;
2090 attrs.offset = offset;
527210c4
RS
2091 set_mem_attrs (mem, &attrs);
2092}
2093
2094/* Clear the offset of MEM. */
2095
2096void
2097clear_mem_offset (rtx mem)
2098{
2099 struct mem_attrs attrs;
2100
2101 attrs = *get_mem_attrs (mem);
754c3d5d 2102 attrs.offset_known_p = false;
f12144dd 2103 set_mem_attrs (mem, &attrs);
35aff10b
AM
2104}
2105
2106/* Set the size of MEM to SIZE. */
2107
2108void
f5541398 2109set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 2110{
f12144dd
RS
2111 struct mem_attrs attrs;
2112
2113 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2114 attrs.size_known_p = true;
2115 attrs.size = size;
f5541398
RS
2116 set_mem_attrs (mem, &attrs);
2117}
2118
2119/* Clear the size of MEM. */
2120
2121void
2122clear_mem_size (rtx mem)
2123{
2124 struct mem_attrs attrs;
2125
2126 attrs = *get_mem_attrs (mem);
754c3d5d 2127 attrs.size_known_p = false;
f12144dd 2128 set_mem_attrs (mem, &attrs);
998d7deb 2129}
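/* Illustrative sketch (hypothetical helper): hand-annotating a MEM,
   e.g. in a target expander that knows more than the middle end.
   Alignment is in bits; size and offset are in bytes.  */
#if 0
static void
example_annotate (rtx mem)
{
  set_mem_align (mem, 64);
  set_mem_size (mem, 8);
  set_mem_offset (mem, 0);
}
#endif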
173b24b9 2130\f
738cc472
RK
2131/* Return a memory reference like MEMREF, but with its mode changed to MODE
2132 and its address changed to ADDR. (VOIDmode means don't change the mode.
2133 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
23b33725
RS
2134 returned memory location is required to be valid. INPLACE is true if any
2135 changes can be made directly to MEMREF or false if MEMREF must be treated
2136 as immutable.
2137
2138 The memory attributes are not changed. */
23b2ce53 2139
738cc472 2140static rtx
ef4bddc2 2141change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
23b33725 2142 bool inplace)
23b2ce53 2143{
09e881c9 2144 addr_space_t as;
60564289 2145 rtx new_rtx;
23b2ce53 2146
5b0264cb 2147 gcc_assert (MEM_P (memref));
09e881c9 2148 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
2149 if (mode == VOIDmode)
2150 mode = GET_MODE (memref);
2151 if (addr == 0)
2152 addr = XEXP (memref, 0);
a74ff877 2153 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2154 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2155 return memref;
23b2ce53 2156
91c5ee5b
VM
 2157 /* Don't validate the address for LRA. LRA can make the address valid
 2158 by itself in the most efficient way. */
2159 if (validate && !lra_in_progress)
23b2ce53 2160 {
f1ec5147 2161 if (reload_in_progress || reload_completed)
09e881c9 2162 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2163 else
09e881c9 2164 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2165 }
750c9258 2166
9b04c6a8
RK
2167 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2168 return memref;
2169
23b33725
RS
2170 if (inplace)
2171 {
2172 XEXP (memref, 0) = addr;
2173 return memref;
2174 }
2175
60564289
KG
2176 new_rtx = gen_rtx_MEM (mode, addr);
2177 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2178 return new_rtx;
23b2ce53 2179}
792760b9 2180
738cc472
RK
2181/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2182 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2183
2184rtx
ef4bddc2 2185change_address (rtx memref, machine_mode mode, rtx addr)
f4ef873c 2186{
23b33725 2187 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
ef4bddc2 2188 machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2189 struct mem_attrs attrs, *defattrs;
4e44c1ef 2190
f12144dd
RS
2191 attrs = *get_mem_attrs (memref);
2192 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2193 attrs.expr = NULL_TREE;
2194 attrs.offset_known_p = false;
2195 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2196 attrs.size = defattrs->size;
2197 attrs.align = defattrs->align;
c2f7bcc3 2198
fdb1c7b3 2199 /* If there are no changes, just return the original memory reference. */
60564289 2200 if (new_rtx == memref)
4e44c1ef 2201 {
f12144dd 2202 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2203 return new_rtx;
4e44c1ef 2204
60564289
KG
2205 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2206 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2207 }
fdb1c7b3 2208
f12144dd 2209 set_mem_attrs (new_rtx, &attrs);
60564289 2210 return new_rtx;
f4ef873c 2211}
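/* Illustrative sketch (hypothetical helper): view the same bytes
   through a different mode and address; of the old attributes only
   the alias set survives.  */
#if 0
static rtx
example_reinterpret (rtx mem, rtx new_addr)
{
  return change_address (mem, SImode, new_addr);
}
#endif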
792760b9 2212
738cc472
RK
2213/* Return a memory reference like MEMREF, but with its mode changed
2214 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2215 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2216 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
 2217 and the caller is responsible for adjusting the MEMREF base register.
2218 If ADJUST_OBJECT is zero, the underlying object associated with the
2219 memory reference is left unchanged and the caller is responsible for
2220 dealing with it. Otherwise, if the new memory reference is outside
5f2cbd0d
RS
2221 the underlying object, even partially, then the object is dropped.
2222 SIZE, if nonzero, is the size of an access in cases where MODE
2223 has no inherent size. */
f1ec5147
RK
2224
2225rtx
ef4bddc2 2226adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
5f2cbd0d
RS
2227 int validate, int adjust_address, int adjust_object,
2228 HOST_WIDE_INT size)
f1ec5147 2229{
823e3574 2230 rtx addr = XEXP (memref, 0);
60564289 2231 rtx new_rtx;
095a2d76 2232 scalar_int_mode address_mode;
a6fe9ed4 2233 int pbits;
0207fa90 2234 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2235 unsigned HOST_WIDE_INT max_align;
0207fa90 2236#ifdef POINTERS_EXTEND_UNSIGNED
095a2d76 2237 scalar_int_mode pointer_mode
0207fa90
EB
2238 = targetm.addr_space.pointer_mode (attrs.addrspace);
2239#endif
823e3574 2240
ee88e690
EB
2241 /* VOIDmode means no mode change for change_address_1. */
2242 if (mode == VOIDmode)
2243 mode = GET_MODE (memref);
2244
5f2cbd0d
RS
2245 /* Take the size of non-BLKmode accesses from the mode. */
2246 defattrs = mode_mem_attrs[(int) mode];
2247 if (defattrs->size_known_p)
2248 size = defattrs->size;
2249
fdb1c7b3
JH
2250 /* If there are no changes, just return the original memory reference. */
2251 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2252 && (size == 0 || (attrs.size_known_p && attrs.size == size))
f12144dd
RS
2253 && (!validate || memory_address_addr_space_p (mode, addr,
2254 attrs.addrspace)))
fdb1c7b3
JH
2255 return memref;
2256
d14419e4 2257 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2258 This may happen even if offset is nonzero -- consider
d14419e4
RH
2259 (plus (plus reg reg) const_int) -- so do this always. */
2260 addr = copy_rtx (addr);
2261
a6fe9ed4
JM
2262 /* Convert a possibly large offset to a signed value within the
2263 range of the target address space. */
372d6395 2264 address_mode = get_address_mode (memref);
d4ebfa65 2265 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2266 if (HOST_BITS_PER_WIDE_INT > pbits)
2267 {
2268 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2269 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2270 >> shift);
2271 }
2272
5ef0b50d 2273 if (adjust_address)
4a78c787
RH
2274 {
2275 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2276 object, we can merge it into the LO_SUM. */
2277 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2278 && offset >= 0
2279 && (unsigned HOST_WIDE_INT) offset
2280 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2281 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2282 plus_constant (address_mode,
2283 XEXP (addr, 1), offset));
0207fa90
EB
2284#ifdef POINTERS_EXTEND_UNSIGNED
2285 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2286 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2287 the fact that pointers are not allowed to overflow. */
2288 else if (POINTERS_EXTEND_UNSIGNED > 0
2289 && GET_CODE (addr) == ZERO_EXTEND
2290 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2291 && trunc_int_for_mode (offset, pointer_mode) == offset)
2292 addr = gen_rtx_ZERO_EXTEND (address_mode,
2293 plus_constant (pointer_mode,
2294 XEXP (addr, 0), offset));
2295#endif
4a78c787 2296 else
0a81f074 2297 addr = plus_constant (address_mode, addr, offset);
4a78c787 2298 }
823e3574 2299
23b33725 2300 new_rtx = change_address_1 (memref, mode, addr, validate, false);
738cc472 2301
09efeca1
PB
2302 /* If the address is a REG, change_address_1 rightfully returns memref,
2303 but this would destroy memref's MEM_ATTRS. */
2304 if (new_rtx == memref && offset != 0)
2305 new_rtx = copy_rtx (new_rtx);
2306
5ef0b50d
EB
2307 /* Conservatively drop the object if we don't know where we start from. */
2308 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2309 {
2310 attrs.expr = NULL_TREE;
2311 attrs.alias = 0;
2312 }
2313
738cc472
RK
2314 /* Compute the new values of the memory attributes due to this adjustment.
2315 We add the offsets and update the alignment. */
754c3d5d 2316 if (attrs.offset_known_p)
5ef0b50d
EB
2317 {
2318 attrs.offset += offset;
2319
2320 /* Drop the object if the new left end is not within its bounds. */
2321 if (adjust_object && attrs.offset < 0)
2322 {
2323 attrs.expr = NULL_TREE;
2324 attrs.alias = 0;
2325 }
2326 }
738cc472 2327
03bf2c23
RK
2328 /* Compute the new alignment by taking the MIN of the alignment and the
2329 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2330 is zero. */
2331 if (offset != 0)
f12144dd 2332 {
146ec50f 2333 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
f12144dd
RS
2334 attrs.align = MIN (attrs.align, max_align);
2335 }
738cc472 2336
5f2cbd0d 2337 if (size)
754c3d5d 2338 {
5ef0b50d 2339 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2340 if (adjust_object && (offset + size) > attrs.size)
5ef0b50d
EB
2341 {
2342 attrs.expr = NULL_TREE;
2343 attrs.alias = 0;
2344 }
754c3d5d 2345 attrs.size_known_p = true;
5f2cbd0d 2346 attrs.size = size;
754c3d5d
RS
2347 }
2348 else if (attrs.size_known_p)
5ef0b50d 2349 {
5f2cbd0d 2350 gcc_assert (!adjust_object);
5ef0b50d 2351 attrs.size -= offset;
5f2cbd0d
RS
2352 /* ??? The store_by_pieces machinery generates negative sizes,
2353 so don't assert for that here. */
5ef0b50d 2354 }
10b76d73 2355
f12144dd 2356 set_mem_attrs (new_rtx, &attrs);
738cc472 2357
60564289 2358 return new_rtx;
f1ec5147
RK
2359}
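/* Illustrative sketch (hypothetical helper): most callers reach
   adjust_address_1 through the adjust_address/adjust_address_nv
   convenience macros in expr.h.  Here, the high SImode half of a
   DImode MEM: */
#if 0
static rtx
example_high_half (rtx dimem)
{
  return adjust_address (dimem, SImode, GET_MODE_SIZE (SImode));
}
#endif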
2360
630036c6
JJ
2361/* Return a memory reference like MEMREF, but with its mode changed
2362 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2363 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2364 nonzero, the memory address is forced to be valid. */
2365
2366rtx
ef4bddc2 2367adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
502b8322 2368 HOST_WIDE_INT offset, int validate)
630036c6 2369{
23b33725 2370 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
5f2cbd0d 2371 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
630036c6
JJ
2372}
2373
8ac61af7
RK
2374/* Return a memory reference like MEMREF, but whose address is changed by
2375 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2376 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2377
2378rtx
502b8322 2379offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2380{
60564289 2381 rtx new_rtx, addr = XEXP (memref, 0);
ef4bddc2 2382 machine_mode address_mode;
754c3d5d 2383 struct mem_attrs attrs, *defattrs;
e3c8ea67 2384
f12144dd 2385 attrs = *get_mem_attrs (memref);
372d6395 2386 address_mode = get_address_mode (memref);
d4ebfa65 2387 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2388
68252e27 2389 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2390 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2391
2392 However, if we did go and rearrange things, we can wind up not
2393 being able to recognize the magic around pic_offset_table_rtx.
2394 This stuff is fragile, and is yet another example of why it is
2395 bad to expose PIC machinery too early. */
f12144dd
RS
2396 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2397 attrs.addrspace)
e3c8ea67
RH
2398 && GET_CODE (addr) == PLUS
2399 && XEXP (addr, 0) == pic_offset_table_rtx)
2400 {
2401 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2402 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2403 }
2404
60564289 2405 update_temp_slot_address (XEXP (memref, 0), new_rtx);
23b33725 2406 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
0d4903b8 2407
fdb1c7b3 2408 /* If there are no changes, just return the original memory reference. */
60564289
KG
2409 if (new_rtx == memref)
2410 return new_rtx;
fdb1c7b3 2411
0d4903b8
RK
2412 /* Update the alignment to reflect the offset. Reset the offset, which
2413 we don't know. */
754c3d5d
RS
2414 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2415 attrs.offset_known_p = false;
2416 attrs.size_known_p = defattrs->size_known_p;
2417 attrs.size = defattrs->size;
f12144dd
RS
2418 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2419 set_mem_attrs (new_rtx, &attrs);
60564289 2420 return new_rtx;
0d4903b8 2421}
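/* Illustrative sketch (hypothetical helper): address MEM + INDEX * 4.
   POW2 == 4 records that at most 32-bit alignment can be assumed for
   the result.  */
#if 0
static rtx
example_indexed (rtx mem, rtx index)
{
  rtx byte_off = gen_rtx_MULT (Pmode, index, GEN_INT (4));
  return offset_address (mem, byte_off, 4);
}
#endif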
68252e27 2422
792760b9
RK
2423/* Return a memory reference like MEMREF, but with its address changed to
2424 ADDR. The caller is asserting that the actual piece of memory pointed
2425 to is the same, just the form of the address is being changed, such as
23b33725
RS
2426 by putting something into a register. INPLACE is true if any changes
2427 can be made directly to MEMREF or false if MEMREF must be treated as
2428 immutable. */
792760b9
RK
2429
2430rtx
23b33725 2431replace_equiv_address (rtx memref, rtx addr, bool inplace)
792760b9 2432{
738cc472
RK
2433 /* change_address_1 copies the memory attribute structure without change
2434 and that's exactly what we want here. */
40c0668b 2435 update_temp_slot_address (XEXP (memref, 0), addr);
23b33725 2436 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
792760b9 2437}
738cc472 2438
f1ec5147
RK
2439/* Likewise, but the reference is not required to be valid. */
2440
2441rtx
23b33725 2442replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
f1ec5147 2443{
23b33725 2444 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
f1ec5147 2445}
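/* Illustrative sketch (hypothetical helper): the "putting something
   into a register" case described above.  */
#if 0
static rtx
example_regify_address (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, addr, false);
}
#endif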
e7dfe4bb
RH
2446
2447/* Return a memory reference like MEMREF, but with its mode widened to
2448 MODE and offset by OFFSET. This would be used by targets that e.g.
2449 cannot issue QImode memory operations and have to use SImode memory
2450 operations plus masking logic. */
2451
2452rtx
ef4bddc2 2453widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2454{
5f2cbd0d 2455 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2456 struct mem_attrs attrs;
e7dfe4bb
RH
2457 unsigned int size = GET_MODE_SIZE (mode);
2458
fdb1c7b3 2459 /* If there are no changes, just return the original memory reference. */
60564289
KG
2460 if (new_rtx == memref)
2461 return new_rtx;
fdb1c7b3 2462
f12144dd
RS
2463 attrs = *get_mem_attrs (new_rtx);
2464
e7dfe4bb
RH
2465 /* If we don't know what offset we were at within the expression, then
2466 we can't know if we've overstepped the bounds. */
754c3d5d 2467 if (! attrs.offset_known_p)
f12144dd 2468 attrs.expr = NULL_TREE;
e7dfe4bb 2469
f12144dd 2470 while (attrs.expr)
e7dfe4bb 2471 {
f12144dd 2472 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2473 {
f12144dd
RS
2474 tree field = TREE_OPERAND (attrs.expr, 1);
2475 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2476
2477 if (! DECL_SIZE_UNIT (field))
2478 {
f12144dd 2479 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2480 break;
2481 }
2482
2483 /* Is the field at least as large as the access? If so, ok,
2484 otherwise strip back to the containing structure. */
03667700
RK
2485 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2486 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2487 && attrs.offset >= 0)
e7dfe4bb
RH
2488 break;
2489
cc269bb6 2490 if (! tree_fits_uhwi_p (offset))
e7dfe4bb 2491 {
f12144dd 2492 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2493 break;
2494 }
2495
f12144dd 2496 attrs.expr = TREE_OPERAND (attrs.expr, 0);
ae7e9ddd
RS
2497 attrs.offset += tree_to_uhwi (offset);
2498 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
754c3d5d 2499 / BITS_PER_UNIT);
e7dfe4bb
RH
2500 }
2501 /* Similarly for the decl. */
f12144dd
RS
2502 else if (DECL_P (attrs.expr)
2503 && DECL_SIZE_UNIT (attrs.expr)
2504 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2505 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2506 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2507 break;
2508 else
2509 {
2510 /* The widened memory access overflows the expression, which means
2511 that it could alias another expression. Zap it. */
f12144dd 2512 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2513 break;
2514 }
2515 }
2516
f12144dd 2517 if (! attrs.expr)
754c3d5d 2518 attrs.offset_known_p = false;
e7dfe4bb
RH
2519
2520 /* The widened memory may alias other stuff, so zap the alias set. */
2521 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2522 attrs.alias = 0;
754c3d5d
RS
2523 attrs.size_known_p = true;
2524 attrs.size = size;
f12144dd 2525 set_mem_attrs (new_rtx, &attrs);
60564289 2526 return new_rtx;
e7dfe4bb 2527}
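/* Illustrative sketch (hypothetical helper) for a target without byte
   loads: fetch the SImode word containing a QImode MEM; the caller is
   still responsible for the shift-and-mask logic.  */
#if 0
static rtx
example_widen_byte (rtx qimem)
{
  return widen_memory_access (qimem, SImode, 0);
}
#endif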
23b2ce53 2528\f
f6129d66
RH
2529/* A fake decl that is used as the MEM_EXPR of spill slots. */
2530static GTY(()) tree spill_slot_decl;
2531
3d7e23f6
RH
2532tree
2533get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2534{
2535 tree d = spill_slot_decl;
2536 rtx rd;
f12144dd 2537 struct mem_attrs attrs;
f6129d66 2538
3d7e23f6 2539 if (d || !force_build_p)
f6129d66
RH
2540 return d;
2541
c2255bc4
AH
2542 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2543 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2544 DECL_ARTIFICIAL (d) = 1;
2545 DECL_IGNORED_P (d) = 1;
2546 TREE_USED (d) = 1;
f6129d66
RH
2547 spill_slot_decl = d;
2548
2549 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2550 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2551 attrs = *mode_mem_attrs[(int) BLKmode];
2552 attrs.alias = new_alias_set ();
2553 attrs.expr = d;
2554 set_mem_attrs (rd, &attrs);
f6129d66
RH
2555 SET_DECL_RTL (d, rd);
2556
2557 return d;
2558}
2559
2560/* Given MEM, a result from assign_stack_local, fill in the memory
2561 attributes as appropriate for a register allocator spill slot.
2562 These slots are not aliasable by other memory. We arrange for
2563 them all to use a single MEM_EXPR, so that the aliasing code can
2564 work properly in the case of shared spill slots. */
2565
2566void
2567set_mem_attrs_for_spill (rtx mem)
2568{
f12144dd
RS
2569 struct mem_attrs attrs;
2570 rtx addr;
f6129d66 2571
f12144dd
RS
2572 attrs = *get_mem_attrs (mem);
2573 attrs.expr = get_spill_slot_decl (true);
2574 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2575 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2576
2577 /* We expect the incoming memory to be of the form:
2578 (mem:MODE (plus (reg sfp) (const_int offset)))
2579 with perhaps the plus missing for offset = 0. */
2580 addr = XEXP (mem, 0);
754c3d5d
RS
2581 attrs.offset_known_p = true;
2582 attrs.offset = 0;
f6129d66 2583 if (GET_CODE (addr) == PLUS
481683e1 2584 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2585 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2586
f12144dd 2587 set_mem_attrs (mem, &attrs);
f6129d66
RH
2588 MEM_NOTRAP_P (mem) = 1;
2589}
2590\f
23b2ce53
RS
2591/* Return a newly created CODE_LABEL rtx with a unique label number. */
2592
7dcc3ab5 2593rtx_code_label *
502b8322 2594gen_label_rtx (void)
23b2ce53 2595{
7dcc3ab5
DM
2596 return as_a <rtx_code_label *> (
2597 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2598 NULL, label_num++, NULL));
23b2ce53
RS
2599}
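/* Illustrative sketch (hypothetical helper): the usual
   create/branch/bind pattern for labels.  */
#if 0
static void
example_label (void)
{
  rtx_code_label *over = gen_label_rtx ();
  /* ... emit a conditional jump to OVER here ... */
  emit_label (over);
}
#endif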
2600\f
2601/* For procedure integration. */
2602
23b2ce53 2603/* Install new pointers to the first and last insns in the chain.
86fe05e0 2604 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2605 Used for an inline-procedure after copying the insn chain. */
2606
2607void
fee3e72c 2608set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
23b2ce53 2609{
fee3e72c 2610 rtx_insn *insn;
86fe05e0 2611
5936d944
JH
2612 set_first_insn (first);
2613 set_last_insn (last);
86fe05e0
RK
2614 cur_insn_uid = 0;
2615
b5b8b0ac
AO
2616 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2617 {
2618 int debug_count = 0;
2619
2620 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2621 cur_debug_insn_uid = 0;
2622
2623 for (insn = first; insn; insn = NEXT_INSN (insn))
2624 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2625 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2626 else
2627 {
2628 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2629 if (DEBUG_INSN_P (insn))
2630 debug_count++;
2631 }
2632
2633 if (debug_count)
2634 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2635 else
2636 cur_debug_insn_uid++;
2637 }
2638 else
2639 for (insn = first; insn; insn = NEXT_INSN (insn))
2640 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2641
2642 cur_insn_uid++;
23b2ce53 2643}
23b2ce53 2644\f
750c9258 2645/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2646 structure. This routine should only be called once. */
23b2ce53 2647
fd743bc1 2648static void
6bb9bf63 2649unshare_all_rtl_1 (rtx_insn *insn)
23b2ce53 2650{
d1b81779 2651 /* Unshare just about everything else. */
2c07f13b 2652 unshare_all_rtl_in_chain (insn);
750c9258 2653
23b2ce53
RS
2654 /* Make sure the addresses of stack slots found outside the insn chain
 2655 (such as in the DECL_RTL of a variable) are not shared
2656 with the insn chain.
2657
2658 This special care is necessary when the stack slot MEM does not
2659 actually appear in the insn chain. If it does appear, its address
2660 is unshared from all else at that point. */
8c39f8ae
TS
2661 unsigned int i;
2662 rtx temp;
2663 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2664 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
23b2ce53
RS
2665}
2666
750c9258 2667/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2668 structure, again. This is a fairly expensive thing to do so it
2669 should be done sparingly. */
2670
2671void
6bb9bf63 2672unshare_all_rtl_again (rtx_insn *insn)
d1b81779 2673{
6bb9bf63 2674 rtx_insn *p;
624c87aa
RE
2675 tree decl;
2676
d1b81779 2677 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2678 if (INSN_P (p))
d1b81779
GK
2679 {
2680 reset_used_flags (PATTERN (p));
2681 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2682 if (CALL_P (p))
2683 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2684 }
624c87aa 2685
2d4aecb3 2686 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2687 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2688
624c87aa 2689 /* Make sure that virtual parameters are not shared. */
910ad8de 2690 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2691 set_used_flags (DECL_RTL (decl));
624c87aa 2692
8c39f8ae
TS
2693 rtx temp;
2694 unsigned int i;
2695 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2696 reset_used_flags (temp);
624c87aa 2697
b4aaa77b 2698 unshare_all_rtl_1 (insn);
fd743bc1
PB
2699}
2700
c2924966 2701unsigned int
fd743bc1
PB
2702unshare_all_rtl (void)
2703{
b4aaa77b 2704 unshare_all_rtl_1 (get_insns ());
60ebe8ce
JJ
2705
2706 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2707 {
2708 if (DECL_RTL_SET_P (decl))
2709 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2710 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2711 }
2712
c2924966 2713 return 0;
d1b81779
GK
2714}
2715
ef330312 2716
2c07f13b
JH
2717/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2718 Recursively does the same for subexpressions. */
2719
2720static void
2721verify_rtx_sharing (rtx orig, rtx insn)
2722{
2723 rtx x = orig;
2724 int i;
2725 enum rtx_code code;
2726 const char *format_ptr;
2727
2728 if (x == 0)
2729 return;
2730
2731 code = GET_CODE (x);
2732
2733 /* These types may be freely shared. */
2734
2735 switch (code)
2736 {
2737 case REG:
0ca5af51
AO
2738 case DEBUG_EXPR:
2739 case VALUE:
d8116890 2740 CASE_CONST_ANY:
2c07f13b
JH
2741 case SYMBOL_REF:
2742 case LABEL_REF:
2743 case CODE_LABEL:
2744 case PC:
2745 case CC0:
3810076b 2746 case RETURN:
26898771 2747 case SIMPLE_RETURN:
2c07f13b 2748 case SCRATCH:
3e89ed8d 2749 /* SCRATCHes must be shared because they represent distinct values. */
c5c5ba89 2750 return;
3e89ed8d 2751 case CLOBBER:
c5c5ba89
JH
2752 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2753 clobbers or clobbers of hard registers that originated as pseudos.
2754 This is needed to allow safe register renaming. */
d7ae3739
EB
2755 if (REG_P (XEXP (x, 0))
2756 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2757 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3e89ed8d
JH
2758 return;
2759 break;
2c07f13b
JH
2760
2761 case CONST:
6fb5fa3c 2762 if (shared_const_p (orig))
2c07f13b
JH
2763 return;
2764 break;
2765
2766 case MEM:
2767 /* A MEM is allowed to be shared if its address is constant. */
2768 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2769 || reload_completed || reload_in_progress)
2770 return;
2771
2772 break;
2773
2774 default:
2775 break;
2776 }
2777
2778 /* This rtx may not be shared. If it has already been seen,
 2779 report the invalid sharing. */
b2b29377 2780 if (flag_checking && RTX_FLAG (x, used))
2c07f13b 2781 {
ab532386 2782 error ("invalid rtl sharing found in the insn");
2c07f13b 2783 debug_rtx (insn);
ab532386 2784 error ("shared rtx");
2c07f13b 2785 debug_rtx (x);
ab532386 2786 internal_error ("internal consistency failure");
2c07f13b 2787 }
1a2caa7a 2788 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2789
2c07f13b
JH
2790 RTX_FLAG (x, used) = 1;
2791
6614fd40 2792 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2793
2794 format_ptr = GET_RTX_FORMAT (code);
2795
2796 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2797 {
2798 switch (*format_ptr++)
2799 {
2800 case 'e':
2801 verify_rtx_sharing (XEXP (x, i), insn);
2802 break;
2803
2804 case 'E':
2805 if (XVEC (x, i) != NULL)
2806 {
2807 int j;
2808 int len = XVECLEN (x, i);
2809
2810 for (j = 0; j < len; j++)
2811 {
1a2caa7a
NS
2812 /* We allow sharing of ASM_OPERANDS inside single
2813 instruction. */
2c07f13b 2814 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2815 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2816 == ASM_OPERANDS))
2c07f13b
JH
2817 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2818 else
2819 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2820 }
2821 }
2822 break;
2823 }
2824 }
2825 return;
2826}
2827
0e0f87d4
SB
2828/* Reset used-flags for INSN. */
2829
2830static void
2831reset_insn_used_flags (rtx insn)
2832{
2833 gcc_assert (INSN_P (insn));
2834 reset_used_flags (PATTERN (insn));
2835 reset_used_flags (REG_NOTES (insn));
2836 if (CALL_P (insn))
2837 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2838}
2839
a24243a0 2840/* Go through all the RTL insn bodies and clear all the USED bits. */
2c07f13b 2841
a24243a0
AK
2842static void
2843reset_all_used_flags (void)
2c07f13b 2844{
dc01c3d1 2845 rtx_insn *p;
2c07f13b
JH
2846
2847 for (p = get_insns (); p; p = NEXT_INSN (p))
2848 if (INSN_P (p))
2849 {
0e0f87d4
SB
2850 rtx pat = PATTERN (p);
2851 if (GET_CODE (pat) != SEQUENCE)
2852 reset_insn_used_flags (p);
2853 else
2954a813 2854 {
0e0f87d4
SB
2855 gcc_assert (REG_NOTES (p) == NULL);
2856 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2857 {
2858 rtx insn = XVECEXP (pat, 0, i);
2859 if (INSN_P (insn))
2860 reset_insn_used_flags (insn);
2861 }
2954a813 2862 }
2c07f13b 2863 }
a24243a0
AK
2864}
2865
0e0f87d4
SB
2866/* Verify sharing in INSN. */
2867
2868static void
2869verify_insn_sharing (rtx insn)
2870{
2871 gcc_assert (INSN_P (insn));
4b498f72
JJ
2872 verify_rtx_sharing (PATTERN (insn), insn);
2873 verify_rtx_sharing (REG_NOTES (insn), insn);
0e0f87d4 2874 if (CALL_P (insn))
4b498f72 2875 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
0e0f87d4
SB
2876}
2877
a24243a0
AK
2878/* Go through all the RTL insn bodies and check that there is no unexpected
2879 sharing in between the subexpressions. */
2880
2881DEBUG_FUNCTION void
2882verify_rtl_sharing (void)
2883{
dc01c3d1 2884 rtx_insn *p;
a24243a0
AK
2885
2886 timevar_push (TV_VERIFY_RTL_SHARING);
2887
2888 reset_all_used_flags ();
2c07f13b
JH
2889
2890 for (p = get_insns (); p; p = NEXT_INSN (p))
2891 if (INSN_P (p))
2892 {
0e0f87d4
SB
2893 rtx pat = PATTERN (p);
2894 if (GET_CODE (pat) != SEQUENCE)
2895 verify_insn_sharing (p);
2896 else
2897 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2898 {
2899 rtx insn = XVECEXP (pat, 0, i);
2900 if (INSN_P (insn))
2901 verify_insn_sharing (insn);
2902 }
2c07f13b 2903 }
a222c01a 2904
a24243a0
AK
2905 reset_all_used_flags ();
2906
a222c01a 2907 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2908}
2909
d1b81779
GK
2910/* Go through all the RTL insn bodies and copy any invalid shared structure.
2911 Assumes the mark bits are cleared at entry. */
2912
2c07f13b 2913void
dc01c3d1 2914unshare_all_rtl_in_chain (rtx_insn *insn)
d1b81779
GK
2915{
2916 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2917 if (INSN_P (insn))
d1b81779
GK
2918 {
2919 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2920 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2921 if (CALL_P (insn))
2922 CALL_INSN_FUNCTION_USAGE (insn)
2923 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2924 }
2925}
2926
2d4aecb3 2927/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2928 shared. We never replace the DECL_RTLs themselves with a copy,
 2929 but expressions mentioned in a DECL_RTL cannot be shared with
2930 expressions in the instruction stream.
2931
2932 Note that reload may convert pseudo registers into memories in-place.
2933 Pseudo registers are always shared, but MEMs never are. Thus if we
2934 reset the used flags on MEMs in the instruction stream, we must set
2935 them again on MEMs that appear in DECL_RTLs. */
2936
2d4aecb3 2937static void
5eb2a9f2 2938set_used_decls (tree blk)
2d4aecb3
AO
2939{
2940 tree t;
2941
2942 /* Mark decls. */
910ad8de 2943 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2944 if (DECL_RTL_SET_P (t))
5eb2a9f2 2945 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2946
2947 /* Now process sub-blocks. */
87caf699 2948 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2949 set_used_decls (t);
2d4aecb3
AO
2950}
2951
23b2ce53 2952/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2953 Recursively does the same for subexpressions. Uses
2954 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2955
2956rtx
502b8322 2957copy_rtx_if_shared (rtx orig)
23b2ce53 2958{
32b32b16
AP
2959 copy_rtx_if_shared_1 (&orig);
2960 return orig;
2961}
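/* Illustrative sketch (hypothetical helper): the reset-then-copy
   idiom.  The USED bits must be cleared first; the second walk then
   copies any rtx it meets twice.  */
#if 0
static rtx
example_unshare (rtx pat)
{
  reset_used_flags (pat);
  return copy_rtx_if_shared (pat);
}
#endif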
2962
ff954f39
AP
2963/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2964 use. Recursively does the same for subexpressions. */
2965
32b32b16
AP
2966static void
2967copy_rtx_if_shared_1 (rtx *orig1)
2968{
2969 rtx x;
b3694847
SS
2970 int i;
2971 enum rtx_code code;
32b32b16 2972 rtx *last_ptr;
b3694847 2973 const char *format_ptr;
23b2ce53 2974 int copied = 0;
32b32b16
AP
2975 int length;
2976
2977 /* Repeat is used to turn tail-recursion into iteration. */
2978repeat:
2979 x = *orig1;
23b2ce53
RS
2980
2981 if (x == 0)
32b32b16 2982 return;
23b2ce53
RS
2983
2984 code = GET_CODE (x);
2985
2986 /* These types may be freely shared. */
2987
2988 switch (code)
2989 {
2990 case REG:
0ca5af51
AO
2991 case DEBUG_EXPR:
2992 case VALUE:
d8116890 2993 CASE_CONST_ANY:
23b2ce53 2994 case SYMBOL_REF:
2c07f13b 2995 case LABEL_REF:
23b2ce53
RS
2996 case CODE_LABEL:
2997 case PC:
2998 case CC0:
276e0224 2999 case RETURN:
26898771 3000 case SIMPLE_RETURN:
23b2ce53 3001 case SCRATCH:
0f41302f 3002 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 3003 return;
3e89ed8d 3004 case CLOBBER:
c5c5ba89
JH
3005 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3006 clobbers or clobbers of hard registers that originated as pseudos.
3007 This is needed to allow safe register renaming. */
d7ae3739
EB
3008 if (REG_P (XEXP (x, 0))
3009 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3010 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3e89ed8d
JH
3011 return;
3012 break;
23b2ce53 3013
b851ea09 3014 case CONST:
6fb5fa3c 3015 if (shared_const_p (x))
32b32b16 3016 return;
b851ea09
RK
3017 break;
3018
b5b8b0ac 3019 case DEBUG_INSN:
23b2ce53
RS
3020 case INSN:
3021 case JUMP_INSN:
3022 case CALL_INSN:
3023 case NOTE:
23b2ce53
RS
3024 case BARRIER:
3025 /* The chain of insns is not being copied. */
32b32b16 3026 return;
23b2ce53 3027
e9a25f70
JL
3028 default:
3029 break;
23b2ce53
RS
3030 }
3031
3032 /* This rtx may not be shared. If it has already been seen,
3033 replace it with a copy of itself. */
3034
2adc7f12 3035 if (RTX_FLAG (x, used))
23b2ce53 3036 {
aacd3885 3037 x = shallow_copy_rtx (x);
23b2ce53
RS
3038 copied = 1;
3039 }
2adc7f12 3040 RTX_FLAG (x, used) = 1;
23b2ce53
RS
3041
3042 /* Now scan the subexpressions recursively.
3043 We can store any replaced subexpressions directly into X
3044 since we know X is not shared! Any vectors in X
3045 must be copied if X was copied. */
3046
3047 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
3048 length = GET_RTX_LENGTH (code);
3049 last_ptr = NULL;
b8698a0f 3050
32b32b16 3051 for (i = 0; i < length; i++)
23b2ce53
RS
3052 {
3053 switch (*format_ptr++)
3054 {
3055 case 'e':
32b32b16
AP
3056 if (last_ptr)
3057 copy_rtx_if_shared_1 (last_ptr);
3058 last_ptr = &XEXP (x, i);
23b2ce53
RS
3059 break;
3060
3061 case 'E':
3062 if (XVEC (x, i) != NULL)
3063 {
b3694847 3064 int j;
f0722107 3065 int len = XVECLEN (x, i);
b8698a0f 3066
6614fd40
KH
3067 /* Copy the vector iff I copied the rtx and the length
3068 is nonzero. */
f0722107 3069 if (copied && len > 0)
8f985ec4 3070 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 3071
5d3cc252 3072 /* Call recursively on everything inside the vector. */
f0722107 3073 for (j = 0; j < len; j++)
32b32b16
AP
3074 {
3075 if (last_ptr)
3076 copy_rtx_if_shared_1 (last_ptr);
3077 last_ptr = &XVECEXP (x, i, j);
3078 }
23b2ce53
RS
3079 }
3080 break;
3081 }
3082 }
32b32b16
AP
3083 *orig1 = x;
3084 if (last_ptr)
3085 {
3086 orig1 = last_ptr;
3087 goto repeat;
3088 }
3089 return;
23b2ce53
RS
3090}
3091
76369a82 3092/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 3093
76369a82
NF
3094static void
3095mark_used_flags (rtx x, int flag)
23b2ce53 3096{
b3694847
SS
3097 int i, j;
3098 enum rtx_code code;
3099 const char *format_ptr;
32b32b16 3100 int length;
23b2ce53 3101
32b32b16
AP
3102 /* Repeat is used to turn tail-recursion into iteration. */
3103repeat:
23b2ce53
RS
3104 if (x == 0)
3105 return;
3106
3107 code = GET_CODE (x);
3108
9faa82d8 3109 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
3110 for them. */
3111
3112 switch (code)
3113 {
3114 case REG:
0ca5af51
AO
3115 case DEBUG_EXPR:
3116 case VALUE:
d8116890 3117 CASE_CONST_ANY:
23b2ce53
RS
3118 case SYMBOL_REF:
3119 case CODE_LABEL:
3120 case PC:
3121 case CC0:
276e0224 3122 case RETURN:
26898771 3123 case SIMPLE_RETURN:
23b2ce53
RS
3124 return;
3125
b5b8b0ac 3126 case DEBUG_INSN:
23b2ce53
RS
3127 case INSN:
3128 case JUMP_INSN:
3129 case CALL_INSN:
3130 case NOTE:
3131 case LABEL_REF:
3132 case BARRIER:
3133 /* The chain of insns is not being copied. */
3134 return;
750c9258 3135
e9a25f70
JL
3136 default:
3137 break;
23b2ce53
RS
3138 }
3139
76369a82 3140 RTX_FLAG (x, used) = flag;
23b2ce53
RS
3141
3142 format_ptr = GET_RTX_FORMAT (code);
32b32b16 3143 length = GET_RTX_LENGTH (code);
b8698a0f 3144
32b32b16 3145 for (i = 0; i < length; i++)
23b2ce53
RS
3146 {
3147 switch (*format_ptr++)
3148 {
3149 case 'e':
32b32b16
AP
3150 if (i == length-1)
3151 {
3152 x = XEXP (x, i);
3153 goto repeat;
3154 }
76369a82 3155 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
3156 break;
3157
3158 case 'E':
3159 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 3160 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
3161 break;
3162 }
3163 }
3164}
2c07f13b 3165
76369a82 3166/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
3167 to look for shared sub-parts. */
3168
3169void
76369a82 3170reset_used_flags (rtx x)
2c07f13b 3171{
76369a82
NF
3172 mark_used_flags (x, 0);
3173}
2c07f13b 3174
76369a82
NF
3175/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3176 to look for shared sub-parts. */
2c07f13b 3177
76369a82
NF
3178void
3179set_used_flags (rtx x)
3180{
3181 mark_used_flags (x, 1);
2c07f13b 3182}
23b2ce53
RS
3183\f
3184/* Copy X if necessary so that it won't be altered by changes in OTHER.
3185 Return X or the rtx for the pseudo reg the value of X was copied into.
3186 OTHER must be valid as a SET_DEST. */
3187
3188rtx
502b8322 3189make_safe_from (rtx x, rtx other)
23b2ce53
RS
3190{
3191 while (1)
3192 switch (GET_CODE (other))
3193 {
3194 case SUBREG:
3195 other = SUBREG_REG (other);
3196 break;
3197 case STRICT_LOW_PART:
3198 case SIGN_EXTEND:
3199 case ZERO_EXTEND:
3200 other = XEXP (other, 0);
3201 break;
3202 default:
3203 goto done;
3204 }
3205 done:
3c0cb5de 3206 if ((MEM_P (other)
23b2ce53 3207 && ! CONSTANT_P (x)
f8cfc6aa 3208 && !REG_P (x)
23b2ce53 3209 && GET_CODE (x) != SUBREG)
f8cfc6aa 3210 || (REG_P (other)
23b2ce53
RS
3211 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3212 || reg_mentioned_p (other, x))))
3213 {
3214 rtx temp = gen_reg_rtx (GET_MODE (x));
3215 emit_move_insn (temp, x);
3216 return temp;
3217 }
3218 return x;
3219}
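/* Illustrative sketch (hypothetical helper): X must stay valid after
   TARGET is overwritten, so copy it to a fresh pseudo if needed.  */
#if 0
static rtx
example_preserve (rtx x, rtx target)
{
  x = make_safe_from (x, target);
  emit_move_insn (target, const0_rtx);
  return x;
}
#endif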
3220\f
3221/* Emission of insns (adding them to the doubly-linked list). */
3222
23b2ce53
RS
3223/* Return the last insn emitted, even if it is in a sequence now pushed. */
3224
db76cf1e 3225rtx_insn *
502b8322 3226get_last_insn_anywhere (void)
23b2ce53 3227{
614d5bd8
AM
3228 struct sequence_stack *seq;
3229 for (seq = get_current_sequence (); seq; seq = seq->next)
3230 if (seq->last != 0)
3231 return seq->last;
23b2ce53
RS
3232 return 0;
3233}
3234
2a496e8b
JDA
3235/* Return the first nonnote insn emitted in the current sequence or current
3236 function. This routine looks inside SEQUENCEs. */
3237
e4685bc8 3238rtx_insn *
502b8322 3239get_first_nonnote_insn (void)
2a496e8b 3240{
dc01c3d1 3241 rtx_insn *insn = get_insns ();
91373fe8
JDA
3242
3243 if (insn)
3244 {
3245 if (NOTE_P (insn))
3246 for (insn = next_insn (insn);
3247 insn && NOTE_P (insn);
3248 insn = next_insn (insn))
3249 continue;
3250 else
3251 {
2ca202e7 3252 if (NONJUMP_INSN_P (insn)
91373fe8 3253 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3254 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
91373fe8
JDA
3255 }
3256 }
2a496e8b
JDA
3257
3258 return insn;
3259}
3260
3261/* Return the last nonnote insn emitted in the current sequence or current
3262 function. This routine looks inside SEQUENCEs. */
3263
e4685bc8 3264rtx_insn *
502b8322 3265get_last_nonnote_insn (void)
2a496e8b 3266{
dc01c3d1 3267 rtx_insn *insn = get_last_insn ();
91373fe8
JDA
3268
3269 if (insn)
3270 {
3271 if (NOTE_P (insn))
3272 for (insn = previous_insn (insn);
3273 insn && NOTE_P (insn);
3274 insn = previous_insn (insn))
3275 continue;
3276 else
3277 {
dc01c3d1
DM
3278 if (NONJUMP_INSN_P (insn))
3279 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3280 insn = seq->insn (seq->len () - 1);
91373fe8
JDA
3281 }
3282 }
2a496e8b
JDA
3283
3284 return insn;
3285}
3286
b5b8b0ac
AO
3287/* Return the number of actual (non-debug) insns emitted in this
3288 function. */
3289
3290int
3291get_max_insn_count (void)
3292{
3293 int n = cur_insn_uid;
3294
3295 /* The table size must be stable across -g, to avoid codegen
3296 differences due to debug insns, and not be affected by
3297 -fmin-insn-uid, to avoid excessive table size and to simplify
3298 debugging of -fcompare-debug failures. */
3299 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3300 n -= cur_debug_insn_uid;
3301 else
3302 n -= MIN_NONDEBUG_INSN_UID;
3303
3304 return n;
3305}
3306
23b2ce53
RS
3307\f
3308/* Return the next insn. If it is a SEQUENCE, return the first insn
3309 of the sequence. */
3310
eb51c837 3311rtx_insn *
4ce524a1 3312next_insn (rtx_insn *insn)
23b2ce53 3313{
75547801
KG
3314 if (insn)
3315 {
3316 insn = NEXT_INSN (insn);
3317 if (insn && NONJUMP_INSN_P (insn)
3318 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3319 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3320 }
23b2ce53 3321
dc01c3d1 3322 return insn;
23b2ce53
RS
3323}
3324
3325/* Return the previous insn. If it is a SEQUENCE, return the last insn
3326 of the sequence. */
3327
eb51c837 3328rtx_insn *
4ce524a1 3329previous_insn (rtx_insn *insn)
23b2ce53 3330{
75547801
KG
3331 if (insn)
3332 {
3333 insn = PREV_INSN (insn);
dc01c3d1
DM
3334 if (insn && NONJUMP_INSN_P (insn))
3335 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3336 insn = seq->insn (seq->len () - 1);
75547801 3337 }
23b2ce53 3338
dc01c3d1 3339 return insn;
23b2ce53
RS
3340}
3341
3342/* Return the next insn after INSN that is not a NOTE. This routine does not
3343 look inside SEQUENCEs. */
3344
eb51c837 3345rtx_insn *
c9b0a227 3346next_nonnote_insn (rtx_insn *insn)
23b2ce53 3347{
75547801
KG
3348 while (insn)
3349 {
3350 insn = NEXT_INSN (insn);
3351 if (insn == 0 || !NOTE_P (insn))
3352 break;
3353 }
23b2ce53 3354
dc01c3d1 3355 return insn;
23b2ce53
RS
3356}
3357
1e211590
DD
3358/* Return the next insn after INSN that is not a NOTE, but stop the
3359 search before we enter another basic block. This routine does not
3360 look inside SEQUENCEs. */
3361
eb51c837 3362rtx_insn *
e4685bc8 3363next_nonnote_insn_bb (rtx_insn *insn)
1e211590
DD
3364{
3365 while (insn)
3366 {
3367 insn = NEXT_INSN (insn);
3368 if (insn == 0 || !NOTE_P (insn))
3369 break;
3370 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3371 return NULL;
1e211590
DD
3372 }
3373
dc01c3d1 3374 return insn;
1e211590
DD
3375}
3376
23b2ce53
RS
3377/* Return the previous insn before INSN that is not a NOTE. This routine does
3378 not look inside SEQUENCEs. */
3379
eb51c837 3380rtx_insn *
c9b0a227 3381prev_nonnote_insn (rtx_insn *insn)
23b2ce53 3382{
75547801
KG
3383 while (insn)
3384 {
3385 insn = PREV_INSN (insn);
3386 if (insn == 0 || !NOTE_P (insn))
3387 break;
3388 }
23b2ce53 3389
dc01c3d1 3390 return insn;
23b2ce53
RS
3391}
3392
896aa4ea
DD
3393/* Return the previous insn before INSN that is not a NOTE, but stop
3394 the search before we enter another basic block. This routine does
3395 not look inside SEQUENCEs. */
3396
eb51c837 3397rtx_insn *
9815687d 3398prev_nonnote_insn_bb (rtx_insn *insn)
896aa4ea 3399{
dc01c3d1 3400
896aa4ea
DD
3401 while (insn)
3402 {
3403 insn = PREV_INSN (insn);
3404 if (insn == 0 || !NOTE_P (insn))
3405 break;
3406 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3407 return NULL;
896aa4ea
DD
3408 }
3409
dc01c3d1 3410 return insn;
896aa4ea
DD
3411}
3412
b5b8b0ac
AO
3413/* Return the next insn after INSN that is not a DEBUG_INSN. This
3414 routine does not look inside SEQUENCEs. */
3415
eb51c837 3416rtx_insn *
30d2ef86 3417next_nondebug_insn (rtx_insn *insn)
b5b8b0ac
AO
3418{
3419 while (insn)
3420 {
3421 insn = NEXT_INSN (insn);
3422 if (insn == 0 || !DEBUG_INSN_P (insn))
3423 break;
3424 }
3425
dc01c3d1 3426 return insn;
b5b8b0ac
AO
3427}
3428
3429/* Return the previous insn before INSN that is not a DEBUG_INSN.
3430 This routine does not look inside SEQUENCEs. */
3431
eb51c837 3432rtx_insn *
30d2ef86 3433prev_nondebug_insn (rtx_insn *insn)
b5b8b0ac
AO
3434{
3435 while (insn)
3436 {
3437 insn = PREV_INSN (insn);
3438 if (insn == 0 || !DEBUG_INSN_P (insn))
3439 break;
3440 }
3441
dc01c3d1 3442 return insn;
b5b8b0ac
AO
3443}
3444
f0fc0803
JJ
3445/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3446 This routine does not look inside SEQUENCEs. */
3447
eb51c837 3448rtx_insn *
1f00691e 3449next_nonnote_nondebug_insn (rtx_insn *insn)
f0fc0803
JJ
3450{
3451 while (insn)
3452 {
3453 insn = NEXT_INSN (insn);
3454 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3455 break;
3456 }
3457
dc01c3d1 3458 return insn;
f0fc0803
JJ
3459}
3460
3461/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3462 This routine does not look inside SEQUENCEs. */
3463
eb51c837 3464rtx_insn *
1f00691e 3465prev_nonnote_nondebug_insn (rtx_insn *insn)
f0fc0803
JJ
3466{
3467 while (insn)
3468 {
3469 insn = PREV_INSN (insn);
3470 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3471 break;
3472 }
3473
dc01c3d1 3474 return insn;
f0fc0803
JJ
3475}
3476
23b2ce53
RS
3477/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3478 or 0, if there is none. This routine does not look inside
0f41302f 3479 SEQUENCEs. */
23b2ce53 3480
eb51c837 3481rtx_insn *
dc01c3d1 3482next_real_insn (rtx uncast_insn)
23b2ce53 3483{
dc01c3d1
DM
3484 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3485
75547801
KG
3486 while (insn)
3487 {
3488 insn = NEXT_INSN (insn);
3489 if (insn == 0 || INSN_P (insn))
3490 break;
3491 }
23b2ce53 3492
dc01c3d1 3493 return insn;
23b2ce53
RS
3494}
3495
3496/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3497 or 0, if there is none. This routine does not look inside
3498 SEQUENCEs. */
3499
eb51c837 3500rtx_insn *
d8fd56b2 3501prev_real_insn (rtx_insn *insn)
23b2ce53 3502{
75547801
KG
3503 while (insn)
3504 {
3505 insn = PREV_INSN (insn);
3506 if (insn == 0 || INSN_P (insn))
3507 break;
3508 }
23b2ce53 3509
dc01c3d1 3510 return insn;
23b2ce53
RS
3511}
3512
ee960939
OH
3513/* Return the last CALL_INSN in the current list, or 0 if there is none.
3514 This routine does not look inside SEQUENCEs. */
3515
049cfc4a 3516rtx_call_insn *
502b8322 3517last_call_insn (void)
ee960939 3518{
049cfc4a 3519 rtx_insn *insn;
ee960939
OH
3520
3521 for (insn = get_last_insn ();
4b4bf941 3522 insn && !CALL_P (insn);
ee960939
OH
3523 insn = PREV_INSN (insn))
3524 ;
3525
049cfc4a 3526 return safe_as_a <rtx_call_insn *> (insn);
ee960939
OH
3527}
3528
23b2ce53 3529/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3530 does not look inside SEQUENCEs. After reload this also skips over
3531 standalone USE and CLOBBER insn. */
23b2ce53 3532
69732dcb 3533int
7c9796ed 3534active_insn_p (const rtx_insn *insn)
69732dcb 3535{
4b4bf941 3536 return (CALL_P (insn) || JUMP_P (insn)
39718607 3537 || JUMP_TABLE_DATA_P (insn) /* FIXME */
4b4bf941 3538 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3539 && (! reload_completed
3540 || (GET_CODE (PATTERN (insn)) != USE
3541 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3542}
3543
eb51c837 3544rtx_insn *
7c9796ed 3545next_active_insn (rtx_insn *insn)
23b2ce53 3546{
75547801
KG
3547 while (insn)
3548 {
3549 insn = NEXT_INSN (insn);
3550 if (insn == 0 || active_insn_p (insn))
3551 break;
3552 }
23b2ce53 3553
dc01c3d1 3554 return insn;
23b2ce53
RS
3555}
3556
3557/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3558 does not look inside SEQUENCEs. After reload this also skips over
3559 standalone USE and CLOBBER insn. */
23b2ce53 3560
eb51c837 3561rtx_insn *
7c9796ed 3562prev_active_insn (rtx_insn *insn)
23b2ce53 3563{
75547801
KG
3564 while (insn)
3565 {
3566 insn = PREV_INSN (insn);
3567 if (insn == 0 || active_insn_p (insn))
3568 break;
3569 }
23b2ce53 3570
dc01c3d1 3571 return insn;
23b2ce53 3572}
23b2ce53 3573\f
23b2ce53
RS
3574/* Return the next insn that uses CC0 after INSN, which is assumed to
3575 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3576 applied to the result of this function should yield INSN).
3577
3578 Normally, this is simply the next insn. However, if a REG_CC_USER note
3579 is present, it contains the insn that uses CC0.
3580
3581 Return 0 if we can't find the insn. */
3582
75b46023 3583rtx_insn *
475edec0 3584next_cc0_user (rtx_insn *insn)
23b2ce53 3585{
906c4e36 3586 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3587
3588 if (note)
75b46023 3589 return safe_as_a <rtx_insn *> (XEXP (note, 0));
23b2ce53
RS
3590
3591 insn = next_nonnote_insn (insn);
4b4bf941 3592 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3593 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
23b2ce53 3594
2c3c49de 3595 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
dc01c3d1 3596 return insn;
23b2ce53
RS
3597
3598 return 0;
3599}
3600
3601/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3602 note, it is the previous insn. */
3603
75b46023 3604rtx_insn *
5c8db5b4 3605prev_cc0_setter (rtx_insn *insn)
23b2ce53 3606{
906c4e36 3607 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3608
3609 if (note)
75b46023 3610 return safe_as_a <rtx_insn *> (XEXP (note, 0));
23b2ce53
RS
3611
3612 insn = prev_nonnote_insn (insn);
5b0264cb 3613 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53 3614
dc01c3d1 3615 return insn;
23b2ce53 3616}
e5bef2e4 3617
594f8779
RZ
3618/* Find a RTX_AUTOINC class rtx which matches DATA. */
3619
3620static int
9021b8ec 3621find_auto_inc (const_rtx x, const_rtx reg)
594f8779 3622{
9021b8ec
RS
3623 subrtx_iterator::array_type array;
3624 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
594f8779 3625 {
9021b8ec
RS
3626 const_rtx x = *iter;
3627 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3628 && rtx_equal_p (reg, XEXP (x, 0)))
3629 return true;
594f8779 3630 }
9021b8ec 3631 return false;
594f8779 3632}
594f8779 3633
e5bef2e4
HB
3634/* Increment the label uses for all labels present in rtx. */
3635
3636static void
502b8322 3637mark_label_nuses (rtx x)
e5bef2e4 3638{
b3694847
SS
3639 enum rtx_code code;
3640 int i, j;
3641 const char *fmt;
e5bef2e4
HB
3642
3643 code = GET_CODE (x);
04a121a7
TS
3644 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3645 LABEL_NUSES (label_ref_label (x))++;
e5bef2e4
HB
3646
3647 fmt = GET_RTX_FORMAT (code);
3648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3649 {
3650 if (fmt[i] == 'e')
0fb7aeda 3651 mark_label_nuses (XEXP (x, i));
e5bef2e4 3652 else if (fmt[i] == 'E')
0fb7aeda 3653 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3654 mark_label_nuses (XVECEXP (x, i, j));
3655 }
3656}
3657
23b2ce53
RS
3658\f
3659/* Try splitting insns that can be split for better scheduling.
3660 PAT is the pattern which might split.
3661 TRIAL is the insn providing PAT.
cc2902df 3662 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3663
3664 If this routine succeeds in splitting, it returns the first or last
11147ebe 3665 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3666 returns TRIAL. If the insn to be returned can be split, it will be. */
3667
53f04688 3668rtx_insn *
bb5c4956 3669try_split (rtx pat, rtx_insn *trial, int last)
23b2ce53 3670{
d4eff95b 3671 rtx_insn *before, *after;
dc01c3d1
DM
3672 rtx note;
3673 rtx_insn *seq, *tem;
5fa396ad 3674 profile_probability probability;
dc01c3d1 3675 rtx_insn *insn_last, *insn;
599aedd9 3676 int njumps = 0;
e67d1102 3677 rtx_insn *call_insn = NULL;
6b24c259 3678
cd9c1ca8
RH
3679 /* We're not good at redistributing frame information. */
3680 if (RTX_FRAME_RELATED_P (trial))
dc01c3d1 3681 return trial;
cd9c1ca8 3682
6b24c259
JH
3683 if (any_condjump_p (trial)
3684 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
5fa396ad
JH
3685 split_branch_probability
3686 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3687 else
3688 split_branch_probability = profile_probability::uninitialized ();
3689
6b24c259
JH
3690 probability = split_branch_probability;
3691
bb5c4956 3692 seq = split_insns (pat, trial);
6b24c259 3693
5fa396ad 3694 split_branch_probability = profile_probability::uninitialized ();
23b2ce53 3695
599aedd9 3696 if (!seq)
dc01c3d1 3697 return trial;
599aedd9
RH
3698
3699 /* Avoid infinite loop if any insn of the result matches
3700 the original pattern. */
3701 insn_last = seq;
3702 while (1)
23b2ce53 3703 {
599aedd9
RH
3704 if (INSN_P (insn_last)
3705 && rtx_equal_p (PATTERN (insn_last), pat))
dc01c3d1 3706 return trial;
599aedd9
RH
3707 if (!NEXT_INSN (insn_last))
3708 break;
3709 insn_last = NEXT_INSN (insn_last);
3710 }
750c9258 3711
6fb5fa3c
DB
3712 /* We will be adding the new sequence to the function. The splitters
3713 may have introduced invalid RTL sharing, so unshare the sequence now. */
3714 unshare_all_rtl_in_chain (seq);
3715
339ba33b 3716 /* Mark labels and copy flags. */
599aedd9
RH
3717 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3718 {
4b4bf941 3719 if (JUMP_P (insn))
599aedd9 3720 {
339ba33b
RS
3721 if (JUMP_P (trial))
3722 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
599aedd9
RH
3723 mark_jump_label (PATTERN (insn), insn, 0);
3724 njumps++;
5fa396ad 3725 if (probability.initialized_p ()
599aedd9
RH
3726 && any_condjump_p (insn)
3727 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3728 {
599aedd9
RH
3729 /* We can preserve the REG_BR_PROB notes only if exactly
3730 one jump is created, otherwise the machine description
3731 is responsible for this step using
3732 split_branch_probability variable. */
5b0264cb 3733 gcc_assert (njumps == 1);
5fa396ad 3734 add_reg_br_prob_note (insn, probability);
2f937369 3735 }
599aedd9
RH
3736 }
3737 }
3738
3739 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3740 in SEQ and copy any additional information across. */
4b4bf941 3741 if (CALL_P (trial))
599aedd9
RH
3742 {
3743 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3744 if (CALL_P (insn))
599aedd9 3745 {
dc01c3d1
DM
3746 rtx_insn *next;
3747 rtx *p;
65712d5c 3748
4f660b15
RO
3749 gcc_assert (call_insn == NULL_RTX);
3750 call_insn = insn;
3751
65712d5c
RS
3752 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3753 target may have explicitly specified. */
3754 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3755 while (*p)
3756 p = &XEXP (*p, 1);
3757 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3758
3759 /* If the old call was a sibling call, the new one must
3760 be too. */
599aedd9 3761 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3762
3763 /* If the new call is the last instruction in the sequence,
3764 it will effectively replace the old call in-situ. Otherwise
3765 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3766 so that it comes immediately after the new call. */
3767 if (NEXT_INSN (insn))
65f3dedb
RS
3768 for (next = NEXT_INSN (trial);
3769 next && NOTE_P (next);
3770 next = NEXT_INSN (next))
3771 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3772 {
3773 remove_insn (next);
3774 add_insn_after (next, insn, NULL);
65f3dedb 3775 break;
65712d5c 3776 }
599aedd9
RH
3777 }
3778 }
4b5e8abe 3779
599aedd9
RH
3780 /* Copy notes, particularly those related to the CFG. */
3781 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3782 {
3783 switch (REG_NOTE_KIND (note))
3784 {
3785 case REG_EH_REGION:
1d65f45c 3786 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3787 break;
216183ce 3788
599aedd9
RH
3789 case REG_NORETURN:
3790 case REG_SETJMP:
0a35513e 3791 case REG_TM:
5c5f0b65 3792 case REG_CALL_NOCF_CHECK:
594f8779 3793 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3794 {
4b4bf941 3795 if (CALL_P (insn))
65c5f2a6 3796 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3797 }
599aedd9 3798 break;
d6e95df8 3799
599aedd9 3800 case REG_NON_LOCAL_GOTO:
594f8779 3801 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3802 {
4b4bf941 3803 if (JUMP_P (insn))
65c5f2a6 3804 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3805 }
599aedd9 3806 break;
e5bef2e4 3807
594f8779 3808 case REG_INC:
760edf20
TS
3809 if (!AUTO_INC_DEC)
3810 break;
3811
594f8779
RZ
3812 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3813 {
3814 rtx reg = XEXP (note, 0);
3815 if (!FIND_REG_INC_NOTE (insn, reg)
9021b8ec 3816 && find_auto_inc (PATTERN (insn), reg))
65c5f2a6 3817 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3818 }
3819 break;
594f8779 3820
9a08d230 3821 case REG_ARGS_SIZE:
e5b51ca0 3822 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
9a08d230
RH
3823 break;
3824
4f660b15
RO
3825 case REG_CALL_DECL:
3826 gcc_assert (call_insn != NULL_RTX);
3827 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3828 break;
3829
599aedd9
RH
3830 default:
3831 break;
23b2ce53 3832 }
599aedd9
RH
3833 }
3834
3835 /* If there are LABELS inside the split insns increment the
3836 usage count so we don't delete the label. */
cf7c4aa6 3837 if (INSN_P (trial))
599aedd9
RH
3838 {
3839 insn = insn_last;
3840 while (insn != NULL_RTX)
23b2ce53 3841 {
cf7c4aa6 3842 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3843 if (NONJUMP_INSN_P (insn))
599aedd9 3844 mark_label_nuses (PATTERN (insn));
23b2ce53 3845
599aedd9
RH
3846 insn = PREV_INSN (insn);
3847 }
23b2ce53
RS
3848 }
3849
d4eff95b
JC
3850 before = PREV_INSN (trial);
3851 after = NEXT_INSN (trial);
3852
5368224f 3853 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
599aedd9
RH
3854
3855 delete_insn (trial);
599aedd9
RH
3856
3857 /* Recursively call try_split for each new insn created; by the
3858 time control returns here that insn will be fully split, so
3859 set LAST and continue from the insn after the one returned.
3860 We can't use next_active_insn here since AFTER may be a note.
3861 Ignore deleted insns, which can be occur if not optimizing. */
3862 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4654c0cf 3863 if (! tem->deleted () && INSN_P (tem))
599aedd9
RH
3864 tem = try_split (PATTERN (tem), tem, 1);
3865
3866 /* Return either the first or the last insn, depending on which was
3867 requested. */
3868 return last
5936d944 3869 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3870 : NEXT_INSN (before);
23b2ce53
RS
3871}
3872\f
3873/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3874 Store PATTERN in the pattern slots. */
23b2ce53 3875
167b9fae 3876rtx_insn *
502b8322 3877make_insn_raw (rtx pattern)
23b2ce53 3878{
167b9fae 3879 rtx_insn *insn;
23b2ce53 3880
167b9fae 3881 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
23b2ce53 3882
43127294 3883 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3884 PATTERN (insn) = pattern;
3885 INSN_CODE (insn) = -1;
1632afca 3886 REG_NOTES (insn) = NULL;
5368224f 3887 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3888 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3889
47984720
NC
3890#ifdef ENABLE_RTL_CHECKING
3891 if (insn
2c3c49de 3892 && INSN_P (insn)
47984720
NC
3893 && (returnjump_p (insn)
3894 || (GET_CODE (insn) == SET
3895 && SET_DEST (insn) == pc_rtx)))
3896 {
d4ee4d25 3897 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3898 debug_rtx (insn);
3899 }
3900#endif
750c9258 3901
23b2ce53
RS
3902 return insn;
3903}
3904
b5b8b0ac
AO
3905/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3906
167b9fae 3907static rtx_insn *
b5b8b0ac
AO
3908make_debug_insn_raw (rtx pattern)
3909{
167b9fae 3910 rtx_debug_insn *insn;
b5b8b0ac 3911
167b9fae 3912 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
b5b8b0ac
AO
3913 INSN_UID (insn) = cur_debug_insn_uid++;
3914 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3915 INSN_UID (insn) = cur_insn_uid++;
3916
3917 PATTERN (insn) = pattern;
3918 INSN_CODE (insn) = -1;
3919 REG_NOTES (insn) = NULL;
5368224f 3920 INSN_LOCATION (insn) = curr_insn_location ();
b5b8b0ac
AO
3921 BLOCK_FOR_INSN (insn) = NULL;
3922
3923 return insn;
3924}
3925
2f937369 3926/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3927
167b9fae 3928static rtx_insn *
502b8322 3929make_jump_insn_raw (rtx pattern)
23b2ce53 3930{
167b9fae 3931 rtx_jump_insn *insn;
23b2ce53 3932
167b9fae 3933 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
1632afca 3934 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3935
3936 PATTERN (insn) = pattern;
3937 INSN_CODE (insn) = -1;
1632afca
RS
3938 REG_NOTES (insn) = NULL;
3939 JUMP_LABEL (insn) = NULL;
5368224f 3940 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3941 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3942
3943 return insn;
3944}
aff507f4 3945
2f937369 3946/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4 3947
167b9fae 3948static rtx_insn *
502b8322 3949make_call_insn_raw (rtx pattern)
aff507f4 3950{
167b9fae 3951 rtx_call_insn *insn;
aff507f4 3952
167b9fae 3953 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
aff507f4
RK
3954 INSN_UID (insn) = cur_insn_uid++;
3955
3956 PATTERN (insn) = pattern;
3957 INSN_CODE (insn) = -1;
aff507f4
RK
3958 REG_NOTES (insn) = NULL;
3959 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5368224f 3960 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3961 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3962
3963 return insn;
3964}
96fba521
SB
3965
3966/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3967
66e8df53 3968static rtx_note *
96fba521
SB
3969make_note_raw (enum insn_note subtype)
3970{
3971 /* Some notes are never created this way at all. These notes are
3972 only created by patching out insns. */
3973 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3974 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3975
66e8df53 3976 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
96fba521
SB
3977 INSN_UID (note) = cur_insn_uid++;
3978 NOTE_KIND (note) = subtype;
3979 BLOCK_FOR_INSN (note) = NULL;
3980 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3981 return note;
3982}
23b2ce53 3983\f
96fba521
SB
3984/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3985 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3986 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3987
3988static inline void
9152e0aa 3989link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
96fba521 3990{
0f82e5c9
DM
3991 SET_PREV_INSN (insn) = prev;
3992 SET_NEXT_INSN (insn) = next;
96fba521
SB
3993 if (prev != NULL)
3994 {
0f82e5c9 3995 SET_NEXT_INSN (prev) = insn;
96fba521
SB
3996 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3997 {
e6eda746
DM
3998 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3999 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
96fba521
SB
4000 }
4001 }
4002 if (next != NULL)
4003 {
0f82e5c9 4004 SET_PREV_INSN (next) = insn;
96fba521 4005 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
e6eda746
DM
4006 {
4007 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4008 SET_PREV_INSN (sequence->insn (0)) = insn;
4009 }
96fba521 4010 }
3ccb989e
SB
4011
4012 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4013 {
e6eda746
DM
4014 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4015 SET_PREV_INSN (sequence->insn (0)) = prev;
4016 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3ccb989e 4017 }
96fba521
SB
4018}
4019
23b2ce53
RS
4020/* Add INSN to the end of the doubly-linked list.
4021 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4022
4023void
9152e0aa 4024add_insn (rtx_insn *insn)
23b2ce53 4025{
9152e0aa 4026 rtx_insn *prev = get_last_insn ();
96fba521 4027 link_insn_into_chain (insn, prev, NULL);
5936d944
JH
4028 if (NULL == get_insns ())
4029 set_first_insn (insn);
5936d944 4030 set_last_insn (insn);
23b2ce53
RS
4031}
4032
96fba521 4033/* Add INSN into the doubly-linked list after insn AFTER. */
23b2ce53 4034
96fba521 4035static void
9152e0aa 4036add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
23b2ce53 4037{
9152e0aa 4038 rtx_insn *next = NEXT_INSN (after);
23b2ce53 4039
4654c0cf 4040 gcc_assert (!optimize || !after->deleted ());
ba213285 4041
96fba521 4042 link_insn_into_chain (insn, after, next);
23b2ce53 4043
96fba521 4044 if (next == NULL)
23b2ce53 4045 {
614d5bd8
AM
4046 struct sequence_stack *seq;
4047
4048 for (seq = get_current_sequence (); seq; seq = seq->next)
4049 if (after == seq->last)
4050 {
4051 seq->last = insn;
4052 break;
4053 }
23b2ce53 4054 }
96fba521
SB
4055}
4056
4057/* Add INSN into the doubly-linked list before insn BEFORE. */
4058
4059static void
9152e0aa 4060add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
96fba521 4061{
9152e0aa 4062 rtx_insn *prev = PREV_INSN (before);
96fba521 4063
4654c0cf 4064 gcc_assert (!optimize || !before->deleted ());
96fba521
SB
4065
4066 link_insn_into_chain (insn, prev, before);
4067
4068 if (prev == NULL)
23b2ce53 4069 {
614d5bd8 4070 struct sequence_stack *seq;
a0ae8e8d 4071
614d5bd8
AM
4072 for (seq = get_current_sequence (); seq; seq = seq->next)
4073 if (before == seq->first)
4074 {
4075 seq->first = insn;
4076 break;
4077 }
4078
4079 gcc_assert (seq);
23b2ce53 4080 }
96fba521
SB
4081}
4082
4083/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4084 If BB is NULL, an attempt is made to infer the bb from before.
4085
4086 This and the next function should be the only functions called
4087 to insert an insn once delay slots have been filled since only
4088 they know how to update a SEQUENCE. */
23b2ce53 4089
96fba521 4090void
9152e0aa 4091add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
96fba521 4092{
1130d5e3 4093 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
9152e0aa 4094 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
96fba521 4095 add_insn_after_nobb (insn, after);
4b4bf941
JQ
4096 if (!BARRIER_P (after)
4097 && !BARRIER_P (insn)
3c030e88
JH
4098 && (bb = BLOCK_FOR_INSN (after)))
4099 {
4100 set_block_for_insn (insn, bb);
38c1593d 4101 if (INSN_P (insn))
6fb5fa3c 4102 df_insn_rescan (insn);
3c030e88 4103 /* Should not happen as first in the BB is always
a1f300c0 4104 either NOTE or LABEL. */
a813c111 4105 if (BB_END (bb) == after
3c030e88 4106 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 4107 && !BARRIER_P (insn)
a38e7aa5 4108 && !NOTE_INSN_BASIC_BLOCK_P (insn))
1130d5e3 4109 BB_END (bb) = insn;
3c030e88 4110 }
23b2ce53
RS
4111}
4112
96fba521
SB
4113/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4114 If BB is NULL, an attempt is made to infer the bb from before.
4115
4116 This and the previous function should be the only functions called
4117 to insert an insn once delay slots have been filled since only
4118 they know how to update a SEQUENCE. */
a0ae8e8d
RK
4119
4120void
9152e0aa 4121add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
a0ae8e8d 4122{
9152e0aa
DM
4123 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4124 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
96fba521 4125 add_insn_before_nobb (insn, before);
a0ae8e8d 4126
b8698a0f 4127 if (!bb
6fb5fa3c
DB
4128 && !BARRIER_P (before)
4129 && !BARRIER_P (insn))
4130 bb = BLOCK_FOR_INSN (before);
4131
4132 if (bb)
3c030e88
JH
4133 {
4134 set_block_for_insn (insn, bb);
38c1593d 4135 if (INSN_P (insn))
6fb5fa3c 4136 df_insn_rescan (insn);
5b0264cb 4137 /* Should not happen as first in the BB is always either NOTE or
43e05e45 4138 LABEL. */
5b0264cb
NS
4139 gcc_assert (BB_HEAD (bb) != insn
4140 /* Avoid clobbering of structure when creating new BB. */
4141 || BARRIER_P (insn)
a38e7aa5 4142 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88 4143 }
a0ae8e8d
RK
4144}
4145
6fb5fa3c
DB
4146/* Replace insn with an deleted instruction note. */
4147
0ce2b299
EB
4148void
4149set_insn_deleted (rtx insn)
6fb5fa3c 4150{
39718607 4151 if (INSN_P (insn))
b2908ba6 4152 df_insn_delete (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
4153 PUT_CODE (insn, NOTE);
4154 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4155}
4156
4157
1f397f45
SB
4158/* Unlink INSN from the insn chain.
4159
4160 This function knows how to handle sequences.
4161
4162 This function does not invalidate data flow information associated with
4163 INSN (i.e. does not call df_insn_delete). That makes this function
4164 usable for only disconnecting an insn from the chain, and re-emit it
4165 elsewhere later.
4166
4167 To later insert INSN elsewhere in the insn chain via add_insn and
4168 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4169 the caller. Nullifying them here breaks many insn chain walks.
4170
4171 To really delete an insn and related DF information, use delete_insn. */
4172
89e99eea 4173void
dc01c3d1 4174remove_insn (rtx uncast_insn)
89e99eea 4175{
dc01c3d1 4176 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
1130d5e3
DM
4177 rtx_insn *next = NEXT_INSN (insn);
4178 rtx_insn *prev = PREV_INSN (insn);
53c17031
JH
4179 basic_block bb;
4180
89e99eea
DB
4181 if (prev)
4182 {
0f82e5c9 4183 SET_NEXT_INSN (prev) = next;
4b4bf941 4184 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea 4185 {
e6eda746
DM
4186 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4187 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
89e99eea
DB
4188 }
4189 }
89e99eea
DB
4190 else
4191 {
614d5bd8
AM
4192 struct sequence_stack *seq;
4193
4194 for (seq = get_current_sequence (); seq; seq = seq->next)
4195 if (insn == seq->first)
89e99eea 4196 {
614d5bd8 4197 seq->first = next;
89e99eea
DB
4198 break;
4199 }
4200
614d5bd8 4201 gcc_assert (seq);
89e99eea
DB
4202 }
4203
4204 if (next)
4205 {
0f82e5c9 4206 SET_PREV_INSN (next) = prev;
4b4bf941 4207 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
e6eda746
DM
4208 {
4209 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4210 SET_PREV_INSN (sequence->insn (0)) = prev;
4211 }
89e99eea 4212 }
89e99eea
DB
4213 else
4214 {
614d5bd8
AM
4215 struct sequence_stack *seq;
4216
4217 for (seq = get_current_sequence (); seq; seq = seq->next)
4218 if (insn == seq->last)
89e99eea 4219 {
614d5bd8 4220 seq->last = prev;
89e99eea
DB
4221 break;
4222 }
4223
614d5bd8 4224 gcc_assert (seq);
89e99eea 4225 }
80eb8028 4226
80eb8028 4227 /* Fix up basic block boundaries, if necessary. */
4b4bf941 4228 if (!BARRIER_P (insn)
53c17031
JH
4229 && (bb = BLOCK_FOR_INSN (insn)))
4230 {
a813c111 4231 if (BB_HEAD (bb) == insn)
53c17031 4232 {
3bf1e984
RK
4233 /* Never ever delete the basic block note without deleting whole
4234 basic block. */
5b0264cb 4235 gcc_assert (!NOTE_P (insn));
1130d5e3 4236 BB_HEAD (bb) = next;
53c17031 4237 }
a813c111 4238 if (BB_END (bb) == insn)
1130d5e3 4239 BB_END (bb) = prev;
53c17031 4240 }
89e99eea
DB
4241}
4242
ee960939
OH
4243/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4244
4245void
502b8322 4246add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 4247{
5b0264cb 4248 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
4249
4250 /* Put the register usage information on the CALL. If there is already
4251 some usage information, put ours at the end. */
4252 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4253 {
4254 rtx link;
4255
4256 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4257 link = XEXP (link, 1))
4258 ;
4259
4260 XEXP (link, 1) = call_fusage;
4261 }
4262 else
4263 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4264}
4265
23b2ce53
RS
4266/* Delete all insns made since FROM.
4267 FROM becomes the new last instruction. */
4268
4269void
fee3e72c 4270delete_insns_since (rtx_insn *from)
23b2ce53
RS
4271{
4272 if (from == 0)
5936d944 4273 set_first_insn (0);
23b2ce53 4274 else
0f82e5c9 4275 SET_NEXT_INSN (from) = 0;
5936d944 4276 set_last_insn (from);
23b2ce53
RS
4277}
4278
5dab5552
MS
4279/* This function is deprecated, please use sequences instead.
4280
4281 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4282 The insns to be moved are those between FROM and TO.
4283 They are moved to a new position after the insn AFTER.
4284 AFTER must not be FROM or TO or any insn in between.
4285
4286 This function does not know about SEQUENCEs and hence should not be
4287 called after delay-slot filling has been done. */
4288
4289void
fee3e72c 4290reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
23b2ce53 4291{
b2b29377
MM
4292 if (flag_checking)
4293 {
4294 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4295 gcc_assert (after != x);
4296 gcc_assert (after != to);
4297 }
4f8344eb 4298
23b2ce53
RS
4299 /* Splice this bunch out of where it is now. */
4300 if (PREV_INSN (from))
0f82e5c9 4301 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
23b2ce53 4302 if (NEXT_INSN (to))
0f82e5c9 4303 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4304 if (get_last_insn () == to)
4305 set_last_insn (PREV_INSN (from));
4306 if (get_insns () == from)
4307 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4308
4309 /* Make the new neighbors point to it and it to them. */
4310 if (NEXT_INSN (after))
0f82e5c9 4311 SET_PREV_INSN (NEXT_INSN (after)) = to;
23b2ce53 4312
0f82e5c9
DM
4313 SET_NEXT_INSN (to) = NEXT_INSN (after);
4314 SET_PREV_INSN (from) = after;
4315 SET_NEXT_INSN (after) = from;
c3284718 4316 if (after == get_last_insn ())
5936d944 4317 set_last_insn (to);
23b2ce53
RS
4318}
4319
3c030e88
JH
4320/* Same as function above, but take care to update BB boundaries. */
4321void
ac9d2d2c 4322reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
3c030e88 4323{
ac9d2d2c 4324 rtx_insn *prev = PREV_INSN (from);
3c030e88
JH
4325 basic_block bb, bb2;
4326
4327 reorder_insns_nobb (from, to, after);
4328
4b4bf941 4329 if (!BARRIER_P (after)
3c030e88
JH
4330 && (bb = BLOCK_FOR_INSN (after)))
4331 {
b2908ba6 4332 rtx_insn *x;
6fb5fa3c 4333 df_set_bb_dirty (bb);
68252e27 4334
4b4bf941 4335 if (!BARRIER_P (from)
3c030e88
JH
4336 && (bb2 = BLOCK_FOR_INSN (from)))
4337 {
a813c111 4338 if (BB_END (bb2) == to)
1130d5e3 4339 BB_END (bb2) = prev;
6fb5fa3c 4340 df_set_bb_dirty (bb2);
3c030e88
JH
4341 }
4342
a813c111 4343 if (BB_END (bb) == after)
1130d5e3 4344 BB_END (bb) = to;
3c030e88
JH
4345
4346 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4347 if (!BARRIER_P (x))
63642d5a 4348 df_insn_change_bb (x, bb);
3c030e88
JH
4349 }
4350}
4351
23b2ce53 4352\f
2f937369
DM
4353/* Emit insn(s) of given code and pattern
4354 at a specified place within the doubly-linked list.
23b2ce53 4355
2f937369
DM
4356 All of the emit_foo global entry points accept an object
4357 X which is either an insn list or a PATTERN of a single
4358 instruction.
23b2ce53 4359
2f937369
DM
4360 There are thus a few canonical ways to generate code and
4361 emit it at a specific place in the instruction stream. For
4362 example, consider the instruction named SPOT and the fact that
4363 we would like to emit some instructions before SPOT. We might
4364 do it like this:
23b2ce53 4365
2f937369
DM
4366 start_sequence ();
4367 ... emit the new instructions ...
4368 insns_head = get_insns ();
4369 end_sequence ();
23b2ce53 4370
2f937369 4371 emit_insn_before (insns_head, SPOT);
23b2ce53 4372
2f937369
DM
4373 It used to be common to generate SEQUENCE rtl instead, but that
4374 is a relic of the past which no longer occurs. The reason is that
4375 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4376 generated would almost certainly die right after it was created. */
23b2ce53 4377
cd459bf8 4378static rtx_insn *
5f02387d 4379emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
167b9fae 4380 rtx_insn *(*make_raw) (rtx))
23b2ce53 4381{
167b9fae 4382 rtx_insn *insn;
23b2ce53 4383
5b0264cb 4384 gcc_assert (before);
2f937369
DM
4385
4386 if (x == NULL_RTX)
cd459bf8 4387 return safe_as_a <rtx_insn *> (last);
2f937369
DM
4388
4389 switch (GET_CODE (x))
23b2ce53 4390 {
b5b8b0ac 4391 case DEBUG_INSN:
2f937369
DM
4392 case INSN:
4393 case JUMP_INSN:
4394 case CALL_INSN:
4395 case CODE_LABEL:
4396 case BARRIER:
4397 case NOTE:
167b9fae 4398 insn = as_a <rtx_insn *> (x);
2f937369
DM
4399 while (insn)
4400 {
167b9fae 4401 rtx_insn *next = NEXT_INSN (insn);
6fb5fa3c 4402 add_insn_before (insn, before, bb);
2f937369
DM
4403 last = insn;
4404 insn = next;
4405 }
4406 break;
4407
4408#ifdef ENABLE_RTL_CHECKING
4409 case SEQUENCE:
5b0264cb 4410 gcc_unreachable ();
2f937369
DM
4411 break;
4412#endif
4413
4414 default:
5f02387d 4415 last = (*make_raw) (x);
6fb5fa3c 4416 add_insn_before (last, before, bb);
2f937369 4417 break;
23b2ce53
RS
4418 }
4419
cd459bf8 4420 return safe_as_a <rtx_insn *> (last);
23b2ce53
RS
4421}
4422
5f02387d
NF
4423/* Make X be output before the instruction BEFORE. */
4424
cd459bf8 4425rtx_insn *
596f2b17 4426emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
5f02387d
NF
4427{
4428 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4429}
4430
2f937369 4431/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4432 and output it before the instruction BEFORE. */
4433
1476d1bd 4434rtx_jump_insn *
596f2b17 4435emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
23b2ce53 4436{
1476d1bd
MM
4437 return as_a <rtx_jump_insn *> (
4438 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4439 make_jump_insn_raw));
23b2ce53
RS
4440}
4441
2f937369 4442/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4443 and output it before the instruction BEFORE. */
4444
cd459bf8 4445rtx_insn *
596f2b17 4446emit_call_insn_before_noloc (rtx x, rtx_insn *before)
969d70ca 4447{
5f02387d
NF
4448 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4449 make_call_insn_raw);
969d70ca
JH
4450}
4451
b5b8b0ac
AO
4452/* Make an instruction with body X and code DEBUG_INSN
4453 and output it before the instruction BEFORE. */
4454
cd459bf8 4455rtx_insn *
b5b8b0ac
AO
4456emit_debug_insn_before_noloc (rtx x, rtx before)
4457{
5f02387d
NF
4458 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4459 make_debug_insn_raw);
b5b8b0ac
AO
4460}
4461
23b2ce53 4462/* Make an insn of code BARRIER
e881bb1b 4463 and output it before the insn BEFORE. */
23b2ce53 4464
cd459bf8 4465rtx_barrier *
502b8322 4466emit_barrier_before (rtx before)
23b2ce53 4467{
cd459bf8 4468 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4469
4470 INSN_UID (insn) = cur_insn_uid++;
4471
6fb5fa3c 4472 add_insn_before (insn, before, NULL);
23b2ce53
RS
4473 return insn;
4474}
4475
e881bb1b
RH
4476/* Emit the label LABEL before the insn BEFORE. */
4477
1476d1bd 4478rtx_code_label *
596f2b17 4479emit_label_before (rtx label, rtx_insn *before)
e881bb1b 4480{
468660d3
SB
4481 gcc_checking_assert (INSN_UID (label) == 0);
4482 INSN_UID (label) = cur_insn_uid++;
4483 add_insn_before (label, before, NULL);
1476d1bd 4484 return as_a <rtx_code_label *> (label);
e881bb1b 4485}
23b2ce53 4486\f
2f937369
DM
4487/* Helper for emit_insn_after, handles lists of instructions
4488 efficiently. */
23b2ce53 4489
e6eda746
DM
4490static rtx_insn *
4491emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
23b2ce53 4492{
e6eda746 4493 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
1130d5e3
DM
4494 rtx_insn *last;
4495 rtx_insn *after_after;
6fb5fa3c
DB
4496 if (!bb && !BARRIER_P (after))
4497 bb = BLOCK_FOR_INSN (after);
23b2ce53 4498
6fb5fa3c 4499 if (bb)
23b2ce53 4500 {
6fb5fa3c 4501 df_set_bb_dirty (bb);
2f937369 4502 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4503 if (!BARRIER_P (last))
6fb5fa3c
DB
4504 {
4505 set_block_for_insn (last, bb);
4506 df_insn_rescan (last);
4507 }
4b4bf941 4508 if (!BARRIER_P (last))
6fb5fa3c
DB
4509 {
4510 set_block_for_insn (last, bb);
4511 df_insn_rescan (last);
4512 }
a813c111 4513 if (BB_END (bb) == after)
1130d5e3 4514 BB_END (bb) = last;
23b2ce53
RS
4515 }
4516 else
2f937369
DM
4517 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4518 continue;
4519
4520 after_after = NEXT_INSN (after);
4521
0f82e5c9
DM
4522 SET_NEXT_INSN (after) = first;
4523 SET_PREV_INSN (first) = after;
4524 SET_NEXT_INSN (last) = after_after;
2f937369 4525 if (after_after)
0f82e5c9 4526 SET_PREV_INSN (after_after) = last;
2f937369 4527
c3284718 4528 if (after == get_last_insn ())
5936d944 4529 set_last_insn (last);
e855c69d 4530
2f937369
DM
4531 return last;
4532}
4533
cd459bf8 4534static rtx_insn *
e6eda746 4535emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
167b9fae 4536 rtx_insn *(*make_raw)(rtx))
2f937369 4537{
e6eda746
DM
4538 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4539 rtx_insn *last = after;
2f937369 4540
5b0264cb 4541 gcc_assert (after);
2f937369
DM
4542
4543 if (x == NULL_RTX)
e6eda746 4544 return last;
2f937369
DM
4545
4546 switch (GET_CODE (x))
23b2ce53 4547 {
b5b8b0ac 4548 case DEBUG_INSN:
2f937369
DM
4549 case INSN:
4550 case JUMP_INSN:
4551 case CALL_INSN:
4552 case CODE_LABEL:
4553 case BARRIER:
4554 case NOTE:
1130d5e3 4555 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
2f937369
DM
4556 break;
4557
4558#ifdef ENABLE_RTL_CHECKING
4559 case SEQUENCE:
5b0264cb 4560 gcc_unreachable ();
2f937369
DM
4561 break;
4562#endif
4563
4564 default:
5f02387d 4565 last = (*make_raw) (x);
6fb5fa3c 4566 add_insn_after (last, after, bb);
2f937369 4567 break;
23b2ce53
RS
4568 }
4569
e6eda746 4570 return last;
23b2ce53
RS
4571}
4572
5f02387d
NF
4573/* Make X be output after the insn AFTER and set the BB of insn. If
4574 BB is NULL, an attempt is made to infer the BB from AFTER. */
4575
cd459bf8 4576rtx_insn *
5f02387d
NF
4577emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4578{
4579 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4580}
4581
255680cf 4582
2f937369 4583/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4584 and output it after the insn AFTER. */
4585
1476d1bd 4586rtx_jump_insn *
a7102479 4587emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4588{
1476d1bd
MM
4589 return as_a <rtx_jump_insn *> (
4590 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
2f937369
DM
4591}
4592
4593/* Make an instruction with body X and code CALL_INSN
4594 and output it after the instruction AFTER. */
4595
cd459bf8 4596rtx_insn *
a7102479 4597emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4598{
5f02387d 4599 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4600}
4601
b5b8b0ac
AO
4602/* Make an instruction with body X and code CALL_INSN
4603 and output it after the instruction AFTER. */
4604
cd459bf8 4605rtx_insn *
b5b8b0ac
AO
4606emit_debug_insn_after_noloc (rtx x, rtx after)
4607{
5f02387d 4608 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4609}
4610
23b2ce53
RS
4611/* Make an insn of code BARRIER
4612 and output it after the insn AFTER. */
4613
cd459bf8 4614rtx_barrier *
502b8322 4615emit_barrier_after (rtx after)
23b2ce53 4616{
cd459bf8 4617 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4618
4619 INSN_UID (insn) = cur_insn_uid++;
4620
6fb5fa3c 4621 add_insn_after (insn, after, NULL);
23b2ce53
RS
4622 return insn;
4623}
4624
4625/* Emit the label LABEL after the insn AFTER. */
4626
cd459bf8 4627rtx_insn *
596f2b17 4628emit_label_after (rtx label, rtx_insn *after)
23b2ce53 4629{
468660d3
SB
4630 gcc_checking_assert (INSN_UID (label) == 0);
4631 INSN_UID (label) = cur_insn_uid++;
4632 add_insn_after (label, after, NULL);
cd459bf8 4633 return as_a <rtx_insn *> (label);
23b2ce53 4634}
96fba521
SB
4635\f
4636/* Notes require a bit of special handling: Some notes need to have their
4637 BLOCK_FOR_INSN set, others should never have it set, and some should
4638 have it set or clear depending on the context. */
4639
4640/* Return true iff a note of kind SUBTYPE should be emitted with routines
4641 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4642 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4643
4644static bool
4645note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4646{
4647 switch (subtype)
4648 {
4649 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4650 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4651 return true;
4652
4653 /* Notes for var tracking and EH region markers can appear between or
4654 inside basic blocks. If the caller is emitting on the basic block
4655 boundary, do not set BLOCK_FOR_INSN on the new note. */
4656 case NOTE_INSN_VAR_LOCATION:
4657 case NOTE_INSN_CALL_ARG_LOCATION:
4658 case NOTE_INSN_EH_REGION_BEG:
4659 case NOTE_INSN_EH_REGION_END:
4660 return on_bb_boundary_p;
4661
4662 /* Otherwise, BLOCK_FOR_INSN must be set. */
4663 default:
4664 return false;
4665 }
4666}
23b2ce53
RS
4667
4668/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4669
66e8df53 4670rtx_note *
589e43f9 4671emit_note_after (enum insn_note subtype, rtx_insn *after)
23b2ce53 4672{
66e8df53 4673 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4674 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4675 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4676
4677 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4678 add_insn_after_nobb (note, after);
4679 else
4680 add_insn_after (note, after, bb);
4681 return note;
4682}
4683
4684/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4685
66e8df53 4686rtx_note *
89b6250d 4687emit_note_before (enum insn_note subtype, rtx_insn *before)
96fba521 4688{
66e8df53 4689 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4690 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4691 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4692
4693 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4694 add_insn_before_nobb (note, before);
4695 else
4696 add_insn_before (note, before, bb);
23b2ce53
RS
4697 return note;
4698}
23b2ce53 4699\f
e8110d6f
NF
4700/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4701 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4702
cd459bf8 4703static rtx_insn *
dc01c3d1 4704emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
167b9fae 4705 rtx_insn *(*make_raw) (rtx))
0d682900 4706{
dc01c3d1 4707 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
e67d1102 4708 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4709
a7102479 4710 if (pattern == NULL_RTX || !loc)
e67d1102 4711 return last;
dd3adcf8 4712
2f937369
DM
4713 after = NEXT_INSN (after);
4714 while (1)
4715 {
20d4397a
EB
4716 if (active_insn_p (after)
4717 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4718 && !INSN_LOCATION (after))
5368224f 4719 INSN_LOCATION (after) = loc;
2f937369
DM
4720 if (after == last)
4721 break;
4722 after = NEXT_INSN (after);
4723 }
e67d1102 4724 return last;
0d682900
JH
4725}
4726
e8110d6f
NF
4727/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4728 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4729 any DEBUG_INSNs. */
4730
cd459bf8 4731static rtx_insn *
dc01c3d1 4732emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
167b9fae 4733 rtx_insn *(*make_raw) (rtx))
a7102479 4734{
dc01c3d1
DM
4735 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4736 rtx_insn *prev = after;
b5b8b0ac 4737
e8110d6f
NF
4738 if (skip_debug_insns)
4739 while (DEBUG_INSN_P (prev))
4740 prev = PREV_INSN (prev);
b5b8b0ac
AO
4741
4742 if (INSN_P (prev))
5368224f 4743 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
e8110d6f 4744 make_raw);
a7102479 4745 else
e8110d6f 4746 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4747}
4748
5368224f 4749/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4750rtx_insn *
e8110d6f 4751emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4752{
e8110d6f
NF
4753 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4754}
2f937369 4755
5368224f 4756/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4757rtx_insn *
e8110d6f
NF
4758emit_insn_after (rtx pattern, rtx after)
4759{
4760 return emit_pattern_after (pattern, after, true, make_insn_raw);
4761}
dd3adcf8 4762
5368224f 4763/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4764rtx_jump_insn *
e8110d6f
NF
4765emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4766{
1476d1bd
MM
4767 return as_a <rtx_jump_insn *> (
4768 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
0d682900
JH
4769}
4770
5368224f 4771/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
1476d1bd 4772rtx_jump_insn *
a7102479
JH
4773emit_jump_insn_after (rtx pattern, rtx after)
4774{
1476d1bd
MM
4775 return as_a <rtx_jump_insn *> (
4776 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
a7102479
JH
4777}
4778
5368224f 4779/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4780rtx_insn *
502b8322 4781emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4782{
e8110d6f 4783 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4784}
4785
5368224f 4786/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4787rtx_insn *
a7102479
JH
4788emit_call_insn_after (rtx pattern, rtx after)
4789{
e8110d6f 4790 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4791}
4792
5368224f 4793/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4794rtx_insn *
b5b8b0ac
AO
4795emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4796{
e8110d6f 4797 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4798}
4799
5368224f 4800/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4801rtx_insn *
b5b8b0ac
AO
4802emit_debug_insn_after (rtx pattern, rtx after)
4803{
e8110d6f 4804 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4805}
4806
e8110d6f
NF
4807/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4808 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4809 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4810 CALL_INSN, etc. */
4811
cd459bf8 4812static rtx_insn *
dc01c3d1 4813emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
167b9fae 4814 rtx_insn *(*make_raw) (rtx))
0d682900 4815{
dc01c3d1
DM
4816 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4817 rtx_insn *first = PREV_INSN (before);
4818 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4819 insnp ? before : NULL_RTX,
4820 NULL, make_raw);
a7102479
JH
4821
4822 if (pattern == NULL_RTX || !loc)
dc01c3d1 4823 return last;
a7102479 4824
26cb3993
JH
4825 if (!first)
4826 first = get_insns ();
4827 else
4828 first = NEXT_INSN (first);
a7102479
JH
4829 while (1)
4830 {
20d4397a
EB
4831 if (active_insn_p (first)
4832 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4833 && !INSN_LOCATION (first))
5368224f 4834 INSN_LOCATION (first) = loc;
a7102479
JH
4835 if (first == last)
4836 break;
4837 first = NEXT_INSN (first);
4838 }
dc01c3d1 4839 return last;
a7102479
JH
4840}
4841
e8110d6f
NF
4842/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4843 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4844 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4845 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4846
cd459bf8 4847static rtx_insn *
dc01c3d1 4848emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
167b9fae 4849 bool insnp, rtx_insn *(*make_raw) (rtx))
a7102479 4850{
dc01c3d1
DM
4851 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4852 rtx_insn *next = before;
b5b8b0ac 4853
e8110d6f
NF
4854 if (skip_debug_insns)
4855 while (DEBUG_INSN_P (next))
4856 next = PREV_INSN (next);
b5b8b0ac
AO
4857
4858 if (INSN_P (next))
5368224f 4859 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
e8110d6f 4860 insnp, make_raw);
a7102479 4861 else
e8110d6f 4862 return emit_pattern_before_noloc (pattern, before,
9b2ea071 4863 insnp ? before : NULL_RTX,
e8110d6f 4864 NULL, make_raw);
a7102479
JH
4865}
4866
5368224f 4867/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4868rtx_insn *
596f2b17 4869emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4870{
e8110d6f
NF
4871 return emit_pattern_before_setloc (pattern, before, loc, true,
4872 make_insn_raw);
4873}
a7102479 4874
5368224f 4875/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
cd459bf8 4876rtx_insn *
e8110d6f
NF
4877emit_insn_before (rtx pattern, rtx before)
4878{
4879 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4880}
a7102479 4881
5368224f 4882/* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4883rtx_jump_insn *
596f2b17 4884emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
e8110d6f 4885{
1476d1bd
MM
4886 return as_a <rtx_jump_insn *> (
4887 emit_pattern_before_setloc (pattern, before, loc, false,
4888 make_jump_insn_raw));
a7102479
JH
4889}
4890
5368224f 4891/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
1476d1bd 4892rtx_jump_insn *
a7102479
JH
4893emit_jump_insn_before (rtx pattern, rtx before)
4894{
1476d1bd
MM
4895 return as_a <rtx_jump_insn *> (
4896 emit_pattern_before (pattern, before, true, false,
4897 make_jump_insn_raw));
a7102479
JH
4898}
4899
5368224f 4900/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4901rtx_insn *
596f2b17 4902emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4903{
e8110d6f
NF
4904 return emit_pattern_before_setloc (pattern, before, loc, false,
4905 make_call_insn_raw);
0d682900 4906}
a7102479 4907
e8110d6f 4908/* Like emit_call_insn_before_noloc,
5368224f 4909 but set insn_location according to BEFORE. */
cd459bf8 4910rtx_insn *
596f2b17 4911emit_call_insn_before (rtx pattern, rtx_insn *before)
a7102479 4912{
e8110d6f
NF
4913 return emit_pattern_before (pattern, before, true, false,
4914 make_call_insn_raw);
a7102479 4915}
b5b8b0ac 4916
5368224f 4917/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4918rtx_insn *
b5b8b0ac
AO
4919emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4920{
e8110d6f
NF
4921 return emit_pattern_before_setloc (pattern, before, loc, false,
4922 make_debug_insn_raw);
b5b8b0ac
AO
4923}
4924
e8110d6f 4925/* Like emit_debug_insn_before_noloc,
5368224f 4926 but set insn_location according to BEFORE. */
cd459bf8 4927rtx_insn *
3a6216b0 4928emit_debug_insn_before (rtx pattern, rtx_insn *before)
b5b8b0ac 4929{
e8110d6f
NF
4930 return emit_pattern_before (pattern, before, false, false,
4931 make_debug_insn_raw);
b5b8b0ac 4932}
0d682900 4933\f
2f937369
DM
4934/* Take X and emit it at the end of the doubly-linked
4935 INSN list.
23b2ce53
RS
4936
4937 Returns the last insn emitted. */
4938
cd459bf8 4939rtx_insn *
502b8322 4940emit_insn (rtx x)
23b2ce53 4941{
cd459bf8
DM
4942 rtx_insn *last = get_last_insn ();
4943 rtx_insn *insn;
23b2ce53 4944
2f937369
DM
4945 if (x == NULL_RTX)
4946 return last;
23b2ce53 4947
2f937369
DM
4948 switch (GET_CODE (x))
4949 {
b5b8b0ac 4950 case DEBUG_INSN:
2f937369
DM
4951 case INSN:
4952 case JUMP_INSN:
4953 case CALL_INSN:
4954 case CODE_LABEL:
4955 case BARRIER:
4956 case NOTE:
cd459bf8 4957 insn = as_a <rtx_insn *> (x);
2f937369 4958 while (insn)
23b2ce53 4959 {
cd459bf8 4960 rtx_insn *next = NEXT_INSN (insn);
23b2ce53 4961 add_insn (insn);
2f937369
DM
4962 last = insn;
4963 insn = next;
23b2ce53 4964 }
2f937369 4965 break;
23b2ce53 4966
2f937369 4967#ifdef ENABLE_RTL_CHECKING
39718607 4968 case JUMP_TABLE_DATA:
2f937369 4969 case SEQUENCE:
5b0264cb 4970 gcc_unreachable ();
2f937369
DM
4971 break;
4972#endif
23b2ce53 4973
2f937369
DM
4974 default:
4975 last = make_insn_raw (x);
4976 add_insn (last);
4977 break;
23b2ce53
RS
4978 }
4979
4980 return last;
4981}
4982
b5b8b0ac
AO
4983/* Make an insn of code DEBUG_INSN with pattern X
4984 and add it to the end of the doubly-linked list. */
4985
cd459bf8 4986rtx_insn *
b5b8b0ac
AO
4987emit_debug_insn (rtx x)
4988{
cd459bf8
DM
4989 rtx_insn *last = get_last_insn ();
4990 rtx_insn *insn;
b5b8b0ac
AO
4991
4992 if (x == NULL_RTX)
4993 return last;
4994
4995 switch (GET_CODE (x))
4996 {
4997 case DEBUG_INSN:
4998 case INSN:
4999 case JUMP_INSN:
5000 case CALL_INSN:
5001 case CODE_LABEL:
5002 case BARRIER:
5003 case NOTE:
cd459bf8 5004 insn = as_a <rtx_insn *> (x);
b5b8b0ac
AO
5005 while (insn)
5006 {
cd459bf8 5007 rtx_insn *next = NEXT_INSN (insn);
b5b8b0ac
AO
5008 add_insn (insn);
5009 last = insn;
5010 insn = next;
5011 }
5012 break;
5013
5014#ifdef ENABLE_RTL_CHECKING
39718607 5015 case JUMP_TABLE_DATA:
b5b8b0ac
AO
5016 case SEQUENCE:
5017 gcc_unreachable ();
5018 break;
5019#endif
5020
5021 default:
5022 last = make_debug_insn_raw (x);
5023 add_insn (last);
5024 break;
5025 }
5026
5027 return last;
5028}
5029
2f937369
DM
5030/* Make an insn of code JUMP_INSN with pattern X
5031 and add it to the end of the doubly-linked list. */
23b2ce53 5032
cd459bf8 5033rtx_insn *
502b8322 5034emit_jump_insn (rtx x)
23b2ce53 5035{
cd459bf8
DM
5036 rtx_insn *last = NULL;
5037 rtx_insn *insn;
23b2ce53 5038
2f937369 5039 switch (GET_CODE (x))
23b2ce53 5040 {
b5b8b0ac 5041 case DEBUG_INSN:
2f937369
DM
5042 case INSN:
5043 case JUMP_INSN:
5044 case CALL_INSN:
5045 case CODE_LABEL:
5046 case BARRIER:
5047 case NOTE:
cd459bf8 5048 insn = as_a <rtx_insn *> (x);
2f937369
DM
5049 while (insn)
5050 {
cd459bf8 5051 rtx_insn *next = NEXT_INSN (insn);
2f937369
DM
5052 add_insn (insn);
5053 last = insn;
5054 insn = next;
5055 }
5056 break;
e0a5c5eb 5057
2f937369 5058#ifdef ENABLE_RTL_CHECKING
39718607 5059 case JUMP_TABLE_DATA:
2f937369 5060 case SEQUENCE:
5b0264cb 5061 gcc_unreachable ();
2f937369
DM
5062 break;
5063#endif
e0a5c5eb 5064
2f937369
DM
5065 default:
5066 last = make_jump_insn_raw (x);
5067 add_insn (last);
5068 break;
3c030e88 5069 }
e0a5c5eb
RS
5070
5071 return last;
5072}
5073
2f937369 5074/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
5075 and add it to the end of the doubly-linked list. */
5076
cd459bf8 5077rtx_insn *
502b8322 5078emit_call_insn (rtx x)
23b2ce53 5079{
cd459bf8 5080 rtx_insn *insn;
2f937369
DM
5081
5082 switch (GET_CODE (x))
23b2ce53 5083 {
b5b8b0ac 5084 case DEBUG_INSN:
2f937369
DM
5085 case INSN:
5086 case JUMP_INSN:
5087 case CALL_INSN:
5088 case CODE_LABEL:
5089 case BARRIER:
5090 case NOTE:
5091 insn = emit_insn (x);
5092 break;
23b2ce53 5093
2f937369
DM
5094#ifdef ENABLE_RTL_CHECKING
5095 case SEQUENCE:
39718607 5096 case JUMP_TABLE_DATA:
5b0264cb 5097 gcc_unreachable ();
2f937369
DM
5098 break;
5099#endif
23b2ce53 5100
2f937369
DM
5101 default:
5102 insn = make_call_insn_raw (x);
23b2ce53 5103 add_insn (insn);
2f937369 5104 break;
23b2ce53 5105 }
2f937369
DM
5106
5107 return insn;
23b2ce53
RS
5108}
5109
5110/* Add the label LABEL to the end of the doubly-linked list. */
5111
1476d1bd
MM
5112rtx_code_label *
5113emit_label (rtx uncast_label)
23b2ce53 5114{
1476d1bd
MM
5115 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5116
468660d3
SB
5117 gcc_checking_assert (INSN_UID (label) == 0);
5118 INSN_UID (label) = cur_insn_uid++;
1476d1bd
MM
5119 add_insn (label);
5120 return label;
23b2ce53
RS
5121}
5122
39718607
SB
5123/* Make an insn of code JUMP_TABLE_DATA
5124 and add it to the end of the doubly-linked list. */
5125
4598afdd 5126rtx_jump_table_data *
39718607
SB
5127emit_jump_table_data (rtx table)
5128{
4598afdd
DM
5129 rtx_jump_table_data *jump_table_data =
5130 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
39718607
SB
5131 INSN_UID (jump_table_data) = cur_insn_uid++;
5132 PATTERN (jump_table_data) = table;
5133 BLOCK_FOR_INSN (jump_table_data) = NULL;
5134 add_insn (jump_table_data);
5135 return jump_table_data;
5136}
5137
23b2ce53
RS
5138/* Make an insn of code BARRIER
5139 and add it to the end of the doubly-linked list. */
5140
cd459bf8 5141rtx_barrier *
502b8322 5142emit_barrier (void)
23b2ce53 5143{
cd459bf8 5144 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
5145 INSN_UID (barrier) = cur_insn_uid++;
5146 add_insn (barrier);
5147 return barrier;
5148}
5149
5f2fc772 5150/* Emit a copy of note ORIG. */
502b8322 5151
66e8df53
DM
5152rtx_note *
5153emit_note_copy (rtx_note *orig)
5f2fc772 5154{
96fba521 5155 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
66e8df53 5156 rtx_note *note = make_note_raw (kind);
5f2fc772 5157 NOTE_DATA (note) = NOTE_DATA (orig);
5f2fc772 5158 add_insn (note);
2e040219 5159 return note;
23b2ce53
RS
5160}
5161
2e040219
NS
5162/* Make an insn of code NOTE or type NOTE_NO
5163 and add it to the end of the doubly-linked list. */
23b2ce53 5164
66e8df53 5165rtx_note *
a38e7aa5 5166emit_note (enum insn_note kind)
23b2ce53 5167{
66e8df53 5168 rtx_note *note = make_note_raw (kind);
23b2ce53
RS
5169 add_insn (note);
5170 return note;
5171}
5172
c41c1387
RS
5173/* Emit a clobber of lvalue X. */
5174
cd459bf8 5175rtx_insn *
c41c1387
RS
5176emit_clobber (rtx x)
5177{
5178 /* CONCATs should not appear in the insn stream. */
5179 if (GET_CODE (x) == CONCAT)
5180 {
5181 emit_clobber (XEXP (x, 0));
5182 return emit_clobber (XEXP (x, 1));
5183 }
5184 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5185}
5186
5187/* Return a sequence of insns to clobber lvalue X. */
5188
cd459bf8 5189rtx_insn *
c41c1387
RS
5190gen_clobber (rtx x)
5191{
cd459bf8 5192 rtx_insn *seq;
c41c1387
RS
5193
5194 start_sequence ();
5195 emit_clobber (x);
5196 seq = get_insns ();
5197 end_sequence ();
5198 return seq;
5199}
5200
5201/* Emit a use of rvalue X. */
5202
cd459bf8 5203rtx_insn *
c41c1387
RS
5204emit_use (rtx x)
5205{
5206 /* CONCATs should not appear in the insn stream. */
5207 if (GET_CODE (x) == CONCAT)
5208 {
5209 emit_use (XEXP (x, 0));
5210 return emit_use (XEXP (x, 1));
5211 }
5212 return emit_insn (gen_rtx_USE (VOIDmode, x));
5213}
5214
5215/* Return a sequence of insns to use rvalue X. */
5216
cd459bf8 5217rtx_insn *
c41c1387
RS
5218gen_use (rtx x)
5219{
cd459bf8 5220 rtx_insn *seq;
c41c1387
RS
5221
5222 start_sequence ();
5223 emit_use (x);
5224 seq = get_insns ();
5225 end_sequence ();
5226 return seq;
5227}
5228
c8912e53
RS
5229/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5230 Return the set in INSN that such notes describe, or NULL if the notes
5231 have no meaning for INSN. */
5232
5233rtx
5234set_for_reg_notes (rtx insn)
5235{
5236 rtx pat, reg;
5237
5238 if (!INSN_P (insn))
5239 return NULL_RTX;
5240
5241 pat = PATTERN (insn);
5242 if (GET_CODE (pat) == PARALLEL)
5243 {
5244 /* We do not use single_set because that ignores SETs of unused
5245 registers. REG_EQUAL and REG_EQUIV notes really do require the
5246 PARALLEL to have a single SET. */
5247 if (multiple_sets (insn))
5248 return NULL_RTX;
5249 pat = XVECEXP (pat, 0, 0);
5250 }
5251
5252 if (GET_CODE (pat) != SET)
5253 return NULL_RTX;
5254
5255 reg = SET_DEST (pat);
5256
5257 /* Notes apply to the contents of a STRICT_LOW_PART. */
7f7379f6
KV
5258 if (GET_CODE (reg) == STRICT_LOW_PART
5259 || GET_CODE (reg) == ZERO_EXTRACT)
c8912e53
RS
5260 reg = XEXP (reg, 0);
5261
5262 /* Check that we have a register. */
5263 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5264 return NULL_RTX;
5265
5266 return pat;
5267}
5268
87b47c85 5269/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 5270 note of this type already exists, remove it first. */
87b47c85 5271
3d238248 5272rtx
502b8322 5273set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
5274{
5275 rtx note = find_reg_note (insn, kind, NULL_RTX);
5276
52488da1
JW
5277 switch (kind)
5278 {
5279 case REG_EQUAL:
5280 case REG_EQUIV:
8073cbd4
EB
5281 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5282 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
c8912e53 5283 return NULL_RTX;
52488da1
JW
5284
5285 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5286 It serves no useful purpose and breaks eliminate_regs. */
5287 if (GET_CODE (datum) == ASM_OPERANDS)
5288 return NULL_RTX;
109374e2
RS
5289
5290 /* Notes with side effects are dangerous. Even if the side-effect
5291 initially mirrors one in PATTERN (INSN), later optimizations
5292 might alter the way that the final register value is calculated
5293 and so move or alter the side-effect in some way. The note would
5294 then no longer be a valid substitution for SET_SRC. */
5295 if (side_effects_p (datum))
5296 return NULL_RTX;
52488da1
JW
5297 break;
5298
5299 default:
5300 break;
5301 }
3d238248 5302
c8912e53
RS
5303 if (note)
5304 XEXP (note, 0) = datum;
5305 else
5306 {
5307 add_reg_note (insn, kind, datum);
5308 note = REG_NOTES (insn);
5309 }
6fb5fa3c
DB
5310
5311 switch (kind)
3d238248 5312 {
6fb5fa3c
DB
5313 case REG_EQUAL:
5314 case REG_EQUIV:
b2908ba6 5315 df_notes_rescan (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
5316 break;
5317 default:
5318 break;
3d238248 5319 }
87b47c85 5320
c8912e53 5321 return note;
87b47c85 5322}
7543f918
JR
5323
5324/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5325rtx
5326set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5327{
c8912e53 5328 rtx set = set_for_reg_notes (insn);
7543f918
JR
5329
5330 if (set && SET_DEST (set) == dst)
5331 return set_unique_reg_note (insn, kind, datum);
5332 return NULL_RTX;
5333}
23b2ce53 5334\f
9d8895c9
RS
5335/* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5336 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5337 is true.
5338
23b2ce53
RS
5339 If X is a label, it is simply added into the insn chain. */
5340
cd459bf8 5341rtx_insn *
9d8895c9 5342emit (rtx x, bool allow_barrier_p)
23b2ce53
RS
5343{
5344 enum rtx_code code = classify_insn (x);
5345
5b0264cb 5346 switch (code)
23b2ce53 5347 {
5b0264cb
NS
5348 case CODE_LABEL:
5349 return emit_label (x);
5350 case INSN:
5351 return emit_insn (x);
5352 case JUMP_INSN:
5353 {
cd459bf8 5354 rtx_insn *insn = emit_jump_insn (x);
9d8895c9
RS
5355 if (allow_barrier_p
5356 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5b0264cb
NS
5357 return emit_barrier ();
5358 return insn;
5359 }
5360 case CALL_INSN:
5361 return emit_call_insn (x);
b5b8b0ac
AO
5362 case DEBUG_INSN:
5363 return emit_debug_insn (x);
5b0264cb
NS
5364 default:
5365 gcc_unreachable ();
23b2ce53 5366 }
23b2ce53
RS
5367}
5368\f
e2500fed 5369/* Space for free sequence stack entries. */
1431042e 5370static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5371
4dfa0342
RH
5372/* Begin emitting insns to a sequence. If this sequence will contain
5373 something that might cause the compiler to pop arguments to function
5374 calls (because those pops have previously been deferred; see
5375 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5376 before calling this function. That will ensure that the deferred
5377 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5378
5379void
502b8322 5380start_sequence (void)
23b2ce53
RS
5381{
5382 struct sequence_stack *tem;
5383
e2500fed
GK
5384 if (free_sequence_stack != NULL)
5385 {
5386 tem = free_sequence_stack;
5387 free_sequence_stack = tem->next;
5388 }
5389 else
766090c2 5390 tem = ggc_alloc<sequence_stack> ();
23b2ce53 5391
614d5bd8 5392 tem->next = get_current_sequence ()->next;
5936d944
JH
5393 tem->first = get_insns ();
5394 tem->last = get_last_insn ();
614d5bd8 5395 get_current_sequence ()->next = tem;
23b2ce53 5396
5936d944
JH
5397 set_first_insn (0);
5398 set_last_insn (0);
23b2ce53
RS
5399}
5400
5c7a310f
MM
5401/* Set up the insn chain starting with FIRST as the current sequence,
5402 saving the previously current one. See the documentation for
5403 start_sequence for more information about how to use this function. */
23b2ce53
RS
5404
5405void
fee3e72c 5406push_to_sequence (rtx_insn *first)
23b2ce53 5407{
fee3e72c 5408 rtx_insn *last;
23b2ce53
RS
5409
5410 start_sequence ();
5411
e84a58ff
EB
5412 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5413 ;
23b2ce53 5414
5936d944
JH
5415 set_first_insn (first);
5416 set_last_insn (last);
23b2ce53
RS
5417}
5418
bb27eeda
SE
5419/* Like push_to_sequence, but take the last insn as an argument to avoid
5420 looping through the list. */
5421
5422void
fee3e72c 5423push_to_sequence2 (rtx_insn *first, rtx_insn *last)
bb27eeda
SE
5424{
5425 start_sequence ();
5426
5936d944
JH
5427 set_first_insn (first);
5428 set_last_insn (last);
bb27eeda
SE
5429}
5430
f15ae3a1
TW
5431/* Set up the outer-level insn chain
5432 as the current sequence, saving the previously current one. */
5433
5434void
502b8322 5435push_topmost_sequence (void)
f15ae3a1 5436{
614d5bd8 5437 struct sequence_stack *top;
f15ae3a1
TW
5438
5439 start_sequence ();
5440
614d5bd8 5441 top = get_topmost_sequence ();
5936d944
JH
5442 set_first_insn (top->first);
5443 set_last_insn (top->last);
f15ae3a1
TW
5444}
5445
5446/* After emitting to the outer-level insn chain, update the outer-level
5447 insn chain, and restore the previous saved state. */
5448
5449void
502b8322 5450pop_topmost_sequence (void)
f15ae3a1 5451{
614d5bd8 5452 struct sequence_stack *top;
f15ae3a1 5453
614d5bd8 5454 top = get_topmost_sequence ();
5936d944
JH
5455 top->first = get_insns ();
5456 top->last = get_last_insn ();
f15ae3a1
TW
5457
5458 end_sequence ();
5459}
5460
23b2ce53
RS
5461/* After emitting to a sequence, restore previous saved state.
5462
5c7a310f 5463 To get the contents of the sequence just made, you must call
2f937369 5464 `get_insns' *before* calling here.
5c7a310f
MM
5465
5466 If the compiler might have deferred popping arguments while
5467 generating this sequence, and this sequence will not be immediately
5468 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5469 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5470 pops are inserted into this sequence, and not into some random
5471 location in the instruction stream. See INHIBIT_DEFER_POP for more
5472 information about deferred popping of arguments. */
23b2ce53
RS
5473
5474void
502b8322 5475end_sequence (void)
23b2ce53 5476{
614d5bd8 5477 struct sequence_stack *tem = get_current_sequence ()->next;
23b2ce53 5478
5936d944
JH
5479 set_first_insn (tem->first);
5480 set_last_insn (tem->last);
614d5bd8 5481 get_current_sequence ()->next = tem->next;
23b2ce53 5482
e2500fed
GK
5483 memset (tem, 0, sizeof (*tem));
5484 tem->next = free_sequence_stack;
5485 free_sequence_stack = tem;
23b2ce53
RS
5486}
5487
5488/* Return 1 if currently emitting into a sequence. */
5489
5490int
502b8322 5491in_sequence_p (void)
23b2ce53 5492{
614d5bd8 5493 return get_current_sequence ()->next != 0;
23b2ce53 5494}
23b2ce53 5495\f
59ec66dc
MM
5496/* Put the various virtual registers into REGNO_REG_RTX. */
5497
2bbdec73 5498static void
bd60bab2 5499init_virtual_regs (void)
59ec66dc 5500{
bd60bab2
JH
5501 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5502 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5503 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5504 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5505 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5506 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5507 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5508}
5509
da43a810
BS
5510\f
5511/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5512static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5513static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5514static int copy_insn_n_scratches;
5515
5516/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5517 copied an ASM_OPERANDS.
5518 In that case, it is the original input-operand vector. */
5519static rtvec orig_asm_operands_vector;
5520
5521/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5522 copied an ASM_OPERANDS.
5523 In that case, it is the copied input-operand vector. */
5524static rtvec copy_asm_operands_vector;
5525
5526/* Likewise for the constraints vector. */
5527static rtvec orig_asm_constraints_vector;
5528static rtvec copy_asm_constraints_vector;
5529
5530/* Recursively create a new copy of an rtx for copy_insn.
5531 This function differs from copy_rtx in that it handles SCRATCHes and
5532 ASM_OPERANDs properly.
5533 Normally, this function is not used directly; use copy_insn as front end.
5534 However, you could first copy an insn pattern with copy_insn and then use
5535 this function afterwards to properly copy any REG_NOTEs containing
5536 SCRATCHes. */
5537
5538rtx
502b8322 5539copy_insn_1 (rtx orig)
da43a810 5540{
b3694847
SS
5541 rtx copy;
5542 int i, j;
5543 RTX_CODE code;
5544 const char *format_ptr;
da43a810 5545
cd9c1ca8
RH
5546 if (orig == NULL)
5547 return NULL;
5548
da43a810
BS
5549 code = GET_CODE (orig);
5550
5551 switch (code)
5552 {
5553 case REG:
a52a87c3 5554 case DEBUG_EXPR:
d8116890 5555 CASE_CONST_ANY:
da43a810
BS
5556 case SYMBOL_REF:
5557 case CODE_LABEL:
5558 case PC:
5559 case CC0:
276e0224 5560 case RETURN:
26898771 5561 case SIMPLE_RETURN:
da43a810 5562 return orig;
3e89ed8d 5563 case CLOBBER:
c5c5ba89
JH
5564 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5565 clobbers or clobbers of hard registers that originated as pseudos.
5566 This is needed to allow safe register renaming. */
d7ae3739
EB
5567 if (REG_P (XEXP (orig, 0))
5568 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5569 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
3e89ed8d
JH
5570 return orig;
5571 break;
da43a810
BS
5572
5573 case SCRATCH:
5574 for (i = 0; i < copy_insn_n_scratches; i++)
5575 if (copy_insn_scratch_in[i] == orig)
5576 return copy_insn_scratch_out[i];
5577 break;
5578
5579 case CONST:
6fb5fa3c 5580 if (shared_const_p (orig))
da43a810
BS
5581 return orig;
5582 break;
750c9258 5583
da43a810
BS
5584 /* A MEM with a constant address is not sharable. The problem is that
5585 the constant address may need to be reloaded. If the mem is shared,
5586 then reloading one copy of this mem will cause all copies to appear
5587 to have been reloaded. */
5588
5589 default:
5590 break;
5591 }
5592
aacd3885
RS
5593 /* Copy the various flags, fields, and other information. We assume
5594 that all fields need copying, and then clear the fields that should
da43a810
BS
5595 not be copied. That is the sensible default behavior, and forces
5596 us to explicitly document why we are *not* copying a flag. */
aacd3885 5597 copy = shallow_copy_rtx (orig);
da43a810 5598
da43a810 5599 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 5600 if (INSN_P (orig))
da43a810 5601 {
2adc7f12
JJ
5602 RTX_FLAG (copy, jump) = 0;
5603 RTX_FLAG (copy, call) = 0;
5604 RTX_FLAG (copy, frame_related) = 0;
da43a810 5605 }
750c9258 5606
da43a810
BS
5607 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5608
5609 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
5610 switch (*format_ptr++)
5611 {
5612 case 'e':
5613 if (XEXP (orig, i) != NULL)
5614 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5615 break;
da43a810 5616
aacd3885
RS
5617 case 'E':
5618 case 'V':
5619 if (XVEC (orig, i) == orig_asm_constraints_vector)
5620 XVEC (copy, i) = copy_asm_constraints_vector;
5621 else if (XVEC (orig, i) == orig_asm_operands_vector)
5622 XVEC (copy, i) = copy_asm_operands_vector;
5623 else if (XVEC (orig, i) != NULL)
5624 {
5625 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5626 for (j = 0; j < XVECLEN (copy, i); j++)
5627 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5628 }
5629 break;
da43a810 5630
aacd3885
RS
5631 case 't':
5632 case 'w':
5633 case 'i':
5634 case 's':
5635 case 'S':
5636 case 'u':
5637 case '0':
5638 /* These are left unchanged. */
5639 break;
da43a810 5640
aacd3885
RS
5641 default:
5642 gcc_unreachable ();
5643 }
da43a810
BS
5644
5645 if (code == SCRATCH)
5646 {
5647 i = copy_insn_n_scratches++;
5b0264cb 5648 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
5649 copy_insn_scratch_in[i] = orig;
5650 copy_insn_scratch_out[i] = copy;
5651 }
5652 else if (code == ASM_OPERANDS)
5653 {
6462bb43
AO
5654 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5655 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5656 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5657 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
5658 }
5659
5660 return copy;
5661}
5662
5663/* Create a new copy of an rtx.
5664 This function differs from copy_rtx in that it handles SCRATCHes and
5665 ASM_OPERANDs properly.
5666 INSN doesn't really have to be a full INSN; it could be just the
5667 pattern. */
5668rtx
502b8322 5669copy_insn (rtx insn)
da43a810
BS
5670{
5671 copy_insn_n_scratches = 0;
5672 orig_asm_operands_vector = 0;
5673 orig_asm_constraints_vector = 0;
5674 copy_asm_operands_vector = 0;
5675 copy_asm_constraints_vector = 0;
5676 return copy_insn_1 (insn);
5677}
59ec66dc 5678
8e383849
JR
5679/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5680 on that assumption that INSN itself remains in its original place. */
5681
f8f0516e
DM
5682rtx_insn *
5683copy_delay_slot_insn (rtx_insn *insn)
8e383849
JR
5684{
5685 /* Copy INSN with its rtx_code, all its notes, location etc. */
f8f0516e 5686 insn = as_a <rtx_insn *> (copy_rtx (insn));
8e383849
JR
5687 INSN_UID (insn) = cur_insn_uid++;
5688 return insn;
5689}
5690
23b2ce53
RS
5691/* Initialize data structures and variables in this file
5692 before generating rtl for each function. */
5693
5694void
502b8322 5695init_emit (void)
23b2ce53 5696{
5936d944
JH
5697 set_first_insn (NULL);
5698 set_last_insn (NULL);
b5b8b0ac
AO
5699 if (MIN_NONDEBUG_INSN_UID)
5700 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5701 else
5702 cur_insn_uid = 1;
5703 cur_debug_insn_uid = 1;
23b2ce53 5704 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
23b2ce53 5705 first_label_num = label_num;
614d5bd8 5706 get_current_sequence ()->next = NULL;
23b2ce53 5707
23b2ce53
RS
5708 /* Init the tables that describe all the pseudo regs. */
5709
3e029763 5710 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 5711
3e029763 5712 crtl->emit.regno_pointer_align
1b4572a8 5713 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
86fe05e0 5714
f44986d7
DM
5715 regno_reg_rtx
5716 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
0d4903b8 5717
e50126e8 5718 /* Put copies of all the hard registers into regno_reg_rtx. */
6cde4876 5719 memcpy (regno_reg_rtx,
5fb0e246 5720 initial_regno_reg_rtx,
6cde4876 5721 FIRST_PSEUDO_REGISTER * sizeof (rtx));
e50126e8 5722
23b2ce53 5723 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
bd60bab2 5724 init_virtual_regs ();
740ab4a2
RK
5725
5726 /* Indicate that the virtual registers and stack locations are
5727 all pointers. */
3502dc9c
JDA
5728 REG_POINTER (stack_pointer_rtx) = 1;
5729 REG_POINTER (frame_pointer_rtx) = 1;
5730 REG_POINTER (hard_frame_pointer_rtx) = 1;
5731 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 5732
3502dc9c
JDA
5733 REG_POINTER (virtual_incoming_args_rtx) = 1;
5734 REG_POINTER (virtual_stack_vars_rtx) = 1;
5735 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5736 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5737 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 5738
86fe05e0 5739#ifdef STACK_BOUNDARY
bdb429a5
RK
5740 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5741 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5742 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5743 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5744
da75ca93
EB
5745 /* ??? These are problematic (for example, 3 out of 4 are wrong on
5746 32-bit SPARC and cannot be all fixed because of the ABI). */
bdb429a5
RK
5747 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5748 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5749 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5750 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
da75ca93 5751
bdb429a5 5752 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
5753#endif
5754
5e82e7bd
JVA
5755#ifdef INIT_EXPANDERS
5756 INIT_EXPANDERS;
5757#endif
23b2ce53
RS
5758}
5759
59d06c05 5760/* Like gen_const_vec_duplicate, but ignore const_tiny_rtx. */
69ef87e2
AH
5761
5762static rtx
59d06c05 5763gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
69ef87e2 5764{
59d06c05
RS
5765 int nunits = GET_MODE_NUNITS (mode);
5766 rtvec v = rtvec_alloc (nunits);
5767 for (int i = 0; i < nunits; ++i)
5768 RTVEC_ELT (v, i) = el;
5769 return gen_rtx_raw_CONST_VECTOR (mode, v);
5770}
69ef87e2 5771
59d06c05
RS
5772/* Generate a vector constant of mode MODE in which every element has
5773 value ELT. */
69ef87e2 5774
59d06c05
RS
5775rtx
5776gen_const_vec_duplicate (machine_mode mode, rtx elt)
5777{
5778 scalar_mode inner_mode = GET_MODE_INNER (mode);
5779 if (elt == CONST0_RTX (inner_mode))
5780 return CONST0_RTX (mode);
5781 else if (elt == CONST1_RTX (inner_mode))
5782 return CONST1_RTX (mode);
5783 else if (elt == CONSTM1_RTX (inner_mode))
5784 return CONSTM1_RTX (mode);
5785
5786 return gen_const_vec_duplicate_1 (mode, elt);
5787}
5788
5789/* Return a vector rtx of mode MODE in which every element has value X.
5790 The result will be a constant if X is constant. */
5791
5792rtx
5793gen_vec_duplicate (machine_mode mode, rtx x)
5794{
5795 if (CONSTANT_P (x))
5796 return gen_const_vec_duplicate (mode, x);
5797 return gen_rtx_VEC_DUPLICATE (mode, x);
5798}
15ed7b52 5799
59d06c05
RS
5800/* Generate a new vector constant for mode MODE and constant value
5801 CONSTANT. */
69ef87e2 5802
59d06c05
RS
5803static rtx
5804gen_const_vector (machine_mode mode, int constant)
5805{
5806 machine_mode inner = GET_MODE_INNER (mode);
69ef87e2 5807
59d06c05
RS
5808 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5809
5810 rtx el = const_tiny_rtx[constant][(int) inner];
5811 gcc_assert (el);
69ef87e2 5812
59d06c05 5813 return gen_const_vec_duplicate_1 (mode, el);
69ef87e2
AH
5814}
5815
a06e3c40 5816/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
a73b091d 5817 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5818rtx
ef4bddc2 5819gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
a06e3c40 5820{
59d06c05 5821 gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));
a73b091d
JW
5822
5823 /* If the values are all the same, check to see if we can use one of the
5824 standard constant vectors. */
59d06c05
RS
5825 if (rtvec_all_equal_p (v))
5826 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
a73b091d
JW
5827
5828 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5829}
5830
b5deb7b6
SL
5831/* Initialise global register information required by all functions. */
5832
5833void
5834init_emit_regs (void)
5835{
5836 int i;
ef4bddc2 5837 machine_mode mode;
1c3f523e 5838 mem_attrs *attrs;
b5deb7b6
SL
5839
5840 /* Reset register attributes */
aebf76a2 5841 reg_attrs_htab->empty ();
b5deb7b6
SL
5842
5843 /* We need reg_raw_mode, so initialize the modes now. */
5844 init_reg_modes_target ();
5845
5846 /* Assign register numbers to the globally defined register rtx. */
b5deb7b6
SL
5847 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5848 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5849 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5850 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5851 virtual_incoming_args_rtx =
5852 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5853 virtual_stack_vars_rtx =
5854 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5855 virtual_stack_dynamic_rtx =
5856 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5857 virtual_outgoing_args_rtx =
5858 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5859 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
32990d5b
JJ
5860 virtual_preferred_stack_boundary_rtx =
5861 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
b5deb7b6
SL
5862
5863 /* Initialize RTL for commonly used hard registers. These are
5864 copied into regno_reg_rtx as we begin to compile each function. */
5865 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5fb0e246 5866 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
b5deb7b6
SL
5867
5868#ifdef RETURN_ADDRESS_POINTER_REGNUM
5869 return_address_pointer_rtx
5870 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5871#endif
5872
ca72dad5 5873 pic_offset_table_rtx = NULL_RTX;
b5deb7b6
SL
5874 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5875 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
1c3f523e
RS
5876
5877 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5878 {
ef4bddc2 5879 mode = (machine_mode) i;
766090c2 5880 attrs = ggc_cleared_alloc<mem_attrs> ();
1c3f523e
RS
5881 attrs->align = BITS_PER_UNIT;
5882 attrs->addrspace = ADDR_SPACE_GENERIC;
5883 if (mode != BLKmode)
5884 {
754c3d5d
RS
5885 attrs->size_known_p = true;
5886 attrs->size = GET_MODE_SIZE (mode);
1c3f523e
RS
5887 if (STRICT_ALIGNMENT)
5888 attrs->align = GET_MODE_ALIGNMENT (mode);
5889 }
5890 mode_mem_attrs[i] = attrs;
5891 }
b5deb7b6
SL
5892}
5893
aa3a12d6
RS
5894/* Initialize global machine_mode variables. */
5895
5896void
5897init_derived_machine_modes (void)
5898{
501623d4
RS
5899 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
5900 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
aa3a12d6 5901 {
501623d4
RS
5902 scalar_int_mode mode = mode_iter.require ();
5903
aa3a12d6 5904 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
501623d4
RS
5905 && !opt_byte_mode.exists ())
5906 opt_byte_mode = mode;
aa3a12d6
RS
5907
5908 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
501623d4
RS
5909 && !opt_word_mode.exists ())
5910 opt_word_mode = mode;
aa3a12d6
RS
5911 }
5912
501623d4
RS
5913 byte_mode = opt_byte_mode.require ();
5914 word_mode = opt_word_mode.require ();
fffbab82 5915 ptr_mode = int_mode_for_size (POINTER_SIZE, 0).require ();
aa3a12d6
RS
5916}
5917
2d888286 5918/* Create some permanent unique rtl objects shared between all functions. */
23b2ce53
RS
5919
5920void
2d888286 5921init_emit_once (void)
23b2ce53
RS
5922{
5923 int i;
ef4bddc2 5924 machine_mode mode;
857c7b46 5925 scalar_float_mode double_mode;
16d22000 5926 opt_scalar_mode smode_iter;
23b2ce53 5927
807e902e
KZ
5928 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5929 CONST_FIXED, and memory attribute hash tables. */
aebf76a2 5930 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
173b24b9 5931
807e902e 5932#if TARGET_SUPPORTS_WIDE_INT
aebf76a2 5933 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
807e902e 5934#endif
aebf76a2 5935 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5692c7bc 5936
aebf76a2 5937 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
091a3ac7 5938
aebf76a2 5939 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
67673f5c 5940
5da077de 5941#ifdef INIT_EXPANDERS
414c4dc4
NC
5942 /* This is to initialize {init|mark|free}_machine_status before the first
5943 call to push_function_context_to. This is needed by the Chill front
a1f300c0 5944 end which calls push_function_context_to before the first call to
5da077de
AS
5945 init_function_start. */
5946 INIT_EXPANDERS;
5947#endif
5948
23b2ce53
RS
5949 /* Create the unique rtx's for certain rtx codes and operand values. */
5950
ecf835e9
KN
5951 /* Process stack-limiting command-line options. */
5952 if (opt_fstack_limit_symbol_arg != NULL)
5953 stack_limit_rtx
5954 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5955 if (opt_fstack_limit_register_no >= 0)
5956 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5957
a2a8cc44 5958 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
c5c76735 5959 tries to use these variables. */
23b2ce53 5960 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
750c9258 5961 const_int_rtx[i + MAX_SAVED_CONST_INT] =
f1b690f1 5962 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
23b2ce53 5963
68d75312
JC
5964 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5965 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5da077de 5966 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
68d75312 5967 else
3b80f6ca 5968 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
23b2ce53 5969
857c7b46 5970 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
aa3a12d6 5971
807e902e
KZ
5972 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5973 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5974 real_from_integer (&dconst2, double_mode, 2, SIGNED);
aefa9d43
KG
5975
5976 dconstm1 = dconst1;
5977 dconstm1.sign = 1;
03f2ea93
RS
5978
5979 dconsthalf = dconst1;
1e92bbb9 5980 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
23b2ce53 5981
e7c82a99 5982 for (i = 0; i < 3; i++)
23b2ce53 5983 {
aefa9d43 5984 const REAL_VALUE_TYPE *const r =
b216cd4a
ZW
5985 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5986
c94843d2 5987 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
15ed7b52 5988 const_tiny_rtx[i][(int) mode] =
555affd7 5989 const_double_from_real_value (*r, mode);
15ed7b52 5990
c94843d2 5991 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
5692c7bc 5992 const_tiny_rtx[i][(int) mode] =
555affd7 5993 const_double_from_real_value (*r, mode);
23b2ce53 5994
906c4e36 5995 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
23b2ce53 5996
c94843d2 5997 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
906c4e36 5998 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
33d3e559 5999
ede6c734
MS
6000 for (mode = MIN_MODE_PARTIAL_INT;
6001 mode <= MAX_MODE_PARTIAL_INT;
ef4bddc2 6002 mode = (machine_mode)((int)(mode) + 1))
33d3e559 6003 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
23b2ce53
RS
6004 }
6005
e7c82a99
JJ
6006 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6007
c94843d2 6008 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
e7c82a99
JJ
6009 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6010
ede6c734
MS
6011 for (mode = MIN_MODE_PARTIAL_INT;
6012 mode <= MAX_MODE_PARTIAL_INT;
ef4bddc2 6013 mode = (machine_mode)((int)(mode) + 1))
c8a89d2a 6014 const_tiny_rtx[3][(int) mode] = constm1_rtx;
c94843d2
RS
6015
6016 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
e90721b1
AP
6017 {
6018 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6019 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6020 }
6021
c94843d2 6022 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
e90721b1
AP
6023 {
6024 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6025 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6026 }
6027
c94843d2 6028 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
a73b091d
JW
6029 {
6030 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6031 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
e7c82a99 6032 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
a73b091d 6033 }
69ef87e2 6034
c94843d2 6035 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
a73b091d
JW
6036 {
6037 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6038 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6039 }
69ef87e2 6040
16d22000 6041 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
325217ed 6042 {
16d22000
RS
6043 scalar_mode smode = smode_iter.require ();
6044 FCONST0 (smode).data.high = 0;
6045 FCONST0 (smode).data.low = 0;
6046 FCONST0 (smode).mode = smode;
6047 const_tiny_rtx[0][(int) smode]
6048 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
325217ed
CF
6049 }
6050
16d22000 6051 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
325217ed 6052 {
16d22000
RS
6053 scalar_mode smode = smode_iter.require ();
6054 FCONST0 (smode).data.high = 0;
6055 FCONST0 (smode).data.low = 0;
6056 FCONST0 (smode).mode = smode;
6057 const_tiny_rtx[0][(int) smode]
6058 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
325217ed
CF
6059 }
6060
16d22000 6061 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
325217ed 6062 {
16d22000
RS
6063 scalar_mode smode = smode_iter.require ();
6064 FCONST0 (smode).data.high = 0;
6065 FCONST0 (smode).data.low = 0;
6066 FCONST0 (smode).mode = smode;
6067 const_tiny_rtx[0][(int) smode]
6068 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
325217ed
CF
6069
6070 /* We store the value 1. */
16d22000
RS
6071 FCONST1 (smode).data.high = 0;
6072 FCONST1 (smode).data.low = 0;
6073 FCONST1 (smode).mode = smode;
6074 FCONST1 (smode).data
6075 = double_int_one.lshift (GET_MODE_FBIT (smode),
9be0ac8c 6076 HOST_BITS_PER_DOUBLE_INT,
16d22000
RS
6077 SIGNED_FIXED_POINT_MODE_P (smode));
6078 const_tiny_rtx[1][(int) smode]
6079 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
325217ed
CF
6080 }
6081
16d22000 6082 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
325217ed 6083 {
16d22000
RS
6084 scalar_mode smode = smode_iter.require ();
6085 FCONST0 (smode).data.high = 0;
6086 FCONST0 (smode).data.low = 0;
6087 FCONST0 (smode).mode = smode;
6088 const_tiny_rtx[0][(int) smode]
6089 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
325217ed
CF
6090
6091 /* We store the value 1. */
16d22000
RS
6092 FCONST1 (smode).data.high = 0;
6093 FCONST1 (smode).data.low = 0;
6094 FCONST1 (smode).mode = smode;
6095 FCONST1 (smode).data
6096 = double_int_one.lshift (GET_MODE_FBIT (smode),
9be0ac8c 6097 HOST_BITS_PER_DOUBLE_INT,
16d22000
RS
6098 SIGNED_FIXED_POINT_MODE_P (smode));
6099 const_tiny_rtx[1][(int) smode]
6100 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
091a3ac7
CF
6101 }
6102
c94843d2 6103 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
091a3ac7
CF
6104 {
6105 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6106 }
6107
c94843d2 6108 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
091a3ac7
CF
6109 {
6110 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6111 }
6112
c94843d2 6113 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
091a3ac7
CF
6114 {
6115 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6116 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6117 }
6118
c94843d2 6119 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
091a3ac7
CF
6120 {
6121 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6122 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
325217ed
CF
6123 }
6124
dbbbbf3b 6125 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
ef4bddc2 6126 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
dbbbbf3b 6127 const_tiny_rtx[0][i] = const0_rtx;
23b2ce53 6128
f0417c82
RH
6129 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6130 if (STORE_FLAG_VALUE == 1)
6131 const_tiny_rtx[1][(int) BImode] = const1_rtx;
ca4adc91 6132
16d22000 6133 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
d5e254e1 6134 {
16d22000
RS
6135 scalar_mode smode = smode_iter.require ();
6136 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6137 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
d5e254e1
IE
6138 }
6139
ca4adc91
RS
6140 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6141 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6142 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6143 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
1476d1bd
MM
6144 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6145 /*prev_insn=*/NULL,
6146 /*next_insn=*/NULL,
6147 /*bb=*/NULL,
6148 /*pattern=*/NULL_RTX,
6149 /*location=*/-1,
6150 CODE_FOR_nothing,
6151 /*reg_notes=*/NULL_RTX);
23b2ce53 6152}
a11759a3 6153\f
969d70ca
JH
6154/* Produce exact duplicate of insn INSN after AFTER.
6155 Care updating of libcall regions if present. */
6156
cd459bf8 6157rtx_insn *
a1950df3 6158emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
969d70ca 6159{
cd459bf8
DM
6160 rtx_insn *new_rtx;
6161 rtx link;
969d70ca
JH
6162
6163 switch (GET_CODE (insn))
6164 {
6165 case INSN:
60564289 6166 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca
JH
6167 break;
6168
6169 case JUMP_INSN:
60564289 6170 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
ec27069c 6171 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
969d70ca
JH
6172 break;
6173
b5b8b0ac
AO
6174 case DEBUG_INSN:
6175 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6176 break;
6177
969d70ca 6178 case CALL_INSN:
60564289 6179 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca 6180 if (CALL_INSN_FUNCTION_USAGE (insn))
60564289 6181 CALL_INSN_FUNCTION_USAGE (new_rtx)
969d70ca 6182 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
60564289
KG
6183 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6184 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6185 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
b8698a0f 6186 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
becfd6e5 6187 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
969d70ca
JH
6188 break;
6189
6190 default:
5b0264cb 6191 gcc_unreachable ();
969d70ca
JH
6192 }
6193
6194 /* Update LABEL_NUSES. */
60564289 6195 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
969d70ca 6196
5368224f 6197 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ba4f7968 6198
0a3d71f5
JW
6199 /* If the old insn is frame related, then so is the new one. This is
6200 primarily needed for IA-64 unwind info which marks epilogue insns,
6201 which may be duplicated by the basic block reordering code. */
60564289 6202 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
0a3d71f5 6203
1581a12c
BS
6204 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6205 rtx *ptail = &REG_NOTES (new_rtx);
6206 while (*ptail != NULL_RTX)
6207 ptail = &XEXP (*ptail, 1);
6208
cf7c4aa6
HPN
6209 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6210 will make them. REG_LABEL_TARGETs are created there too, but are
6211 supposed to be sticky, so we copy them. */
969d70ca 6212 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cf7c4aa6 6213 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
969d70ca 6214 {
1581a12c
BS
6215 *ptail = duplicate_reg_note (link);
6216 ptail = &XEXP (*ptail, 1);
969d70ca
JH
6217 }
6218
60564289
KG
6219 INSN_CODE (new_rtx) = INSN_CODE (insn);
6220 return new_rtx;
969d70ca 6221}
e2500fed 6222
1431042e 6223static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d 6224rtx
ef4bddc2 6225gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
3e89ed8d
JH
6226{
6227 if (hard_reg_clobbers[mode][regno])
6228 return hard_reg_clobbers[mode][regno];
6229 else
6230 return (hard_reg_clobbers[mode][regno] =
6231 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6232}
6233
5368224f
DC
6234location_t prologue_location;
6235location_t epilogue_location;
78bde837
SB
6236
6237/* Hold current location information and last location information, so the
6238 datastructures are built lazily only when some instructions in given
6239 place are needed. */
3a50da34 6240static location_t curr_location;
78bde837 6241
5368224f 6242/* Allocate insn location datastructure. */
78bde837 6243void
5368224f 6244insn_locations_init (void)
78bde837 6245{
5368224f 6246 prologue_location = epilogue_location = 0;
78bde837 6247 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6248}
6249
6250/* At the end of emit stage, clear current location. */
6251void
5368224f 6252insn_locations_finalize (void)
78bde837 6253{
5368224f
DC
6254 epilogue_location = curr_location;
6255 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6256}
6257
6258/* Set current location. */
6259void
5368224f 6260set_curr_insn_location (location_t location)
78bde837 6261{
78bde837
SB
6262 curr_location = location;
6263}
6264
6265/* Get current location. */
6266location_t
5368224f 6267curr_insn_location (void)
78bde837
SB
6268{
6269 return curr_location;
6270}
6271
78bde837
SB
6272/* Return lexical scope block insn belongs to. */
6273tree
a1950df3 6274insn_scope (const rtx_insn *insn)
78bde837 6275{
5368224f 6276 return LOCATION_BLOCK (INSN_LOCATION (insn));
78bde837
SB
6277}
6278
6279/* Return line number of the statement that produced this insn. */
6280int
a1950df3 6281insn_line (const rtx_insn *insn)
78bde837 6282{
5368224f 6283 return LOCATION_LINE (INSN_LOCATION (insn));
78bde837
SB
6284}
6285
6286/* Return source file of the statement that produced this insn. */
6287const char *
a1950df3 6288insn_file (const rtx_insn *insn)
78bde837 6289{
5368224f 6290 return LOCATION_FILE (INSN_LOCATION (insn));
78bde837 6291}
8930883e 6292
ffa4602f
EB
6293/* Return expanded location of the statement that produced this insn. */
6294expanded_location
a1950df3 6295insn_location (const rtx_insn *insn)
ffa4602f
EB
6296{
6297 return expand_location (INSN_LOCATION (insn));
6298}
6299
8930883e
MK
6300/* Return true if memory model MODEL requires a pre-operation (release-style)
6301 barrier or a post-operation (acquire-style) barrier. While not universal,
6302 this function matches behavior of several targets. */
6303
6304bool
6305need_atomic_barrier_p (enum memmodel model, bool pre)
6306{
40ad260d 6307 switch (model & MEMMODEL_BASE_MASK)
8930883e
MK
6308 {
6309 case MEMMODEL_RELAXED:
6310 case MEMMODEL_CONSUME:
6311 return false;
6312 case MEMMODEL_RELEASE:
6313 return pre;
6314 case MEMMODEL_ACQUIRE:
6315 return !pre;
6316 case MEMMODEL_ACQ_REL:
6317 case MEMMODEL_SEQ_CST:
6318 return true;
6319 default:
6320 gcc_unreachable ();
6321 }
6322}
8194c537
DM
6323
6324/* Initialize fields of rtl_data related to stack alignment. */
6325
6326void
6327rtl_data::init_stack_alignment ()
6328{
6329 stack_alignment_needed = STACK_BOUNDARY;
6330 max_used_stack_slot_alignment = STACK_BOUNDARY;
6331 stack_alignment_estimated = 0;
6332 preferred_stack_boundary = STACK_BOUNDARY;
6333}
6334
8930883e 6335\f
e2500fed 6336#include "gt-emit-rtl.h"