/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
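
/* Illustrative sketch (an editorial example, not from any machine
   description): a generator such as gen_rtx_fmt_ee (PLUS, SImode, x, y)
   builds a node like

     (plus:SI (reg:SI 60) (const_int 4))

   and the insn-emit.c routines compose such nodes into full patterns,
   e.g. (set (reg:SI 59) (plus:SI (reg:SI 60) (const_int 4))).  */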

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

312}
313
f12144dd 314/* Return true if the given memory attributes are equal. */
c13e8210 315
96b3c03f 316bool
f12144dd 317mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
c13e8210 318{
96b3c03f
RB
319 if (p == q)
320 return true;
321 if (!p || !q)
322 return false;
754c3d5d
RS
323 return (p->alias == q->alias
324 && p->offset_known_p == q->offset_known_p
325 && (!p->offset_known_p || p->offset == q->offset)
326 && p->size_known_p == q->size_known_p
327 && (!p->size_known_p || p->size == q->size)
328 && p->align == q->align
09e881c9 329 && p->addrspace == q->addrspace
78b76d08
SB
330 && (p->expr == q->expr
331 || (p->expr != NULL_TREE && q->expr != NULL_TREE
332 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
333}
334
f12144dd 335/* Set MEM's memory attributes so that they are the same as ATTRS. */
10b76d73 336
f12144dd
RS
337static void
338set_mem_attrs (rtx mem, mem_attrs *attrs)
339{
f12144dd
RS
340 /* If everything is the default, we can just clear the attributes. */
341 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
342 {
343 MEM_ATTRS (mem) = 0;
344 return;
345 }
173b24b9 346
84053e02
RB
347 if (!MEM_ATTRS (mem)
348 || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
173b24b9 349 {
766090c2 350 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
84053e02 351 memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
173b24b9 352 }
c13e8210
MM
353}
354
a560d4d4
JH
355/* Returns a hash code for X (which is a really a reg_attrs *). */
356
aebf76a2
TS
357hashval_t
358reg_attr_hasher::hash (reg_attrs *x)
a560d4d4 359{
aebf76a2 360 const reg_attrs *const p = x;
a560d4d4 361
9841210f 362 return ((p->offset * 1000) ^ (intptr_t) p->decl);
a560d4d4
JH
363}
364
aebf76a2
TS
365/* Returns nonzero if the value represented by X is the same as that given by
366 Y. */
a560d4d4 367
aebf76a2
TS
368bool
369reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
a560d4d4 370{
aebf76a2
TS
371 const reg_attrs *const p = x;
372 const reg_attrs *const q = y;
a560d4d4
JH
373
374 return (p->decl == q->decl && p->offset == q->offset);
375}
376/* Allocate a new reg_attrs structure and insert it into the hash table if
377 one identical to it is not already in the table. We are doing this for
378 MEM of mode MODE. */
379
380static reg_attrs *
502b8322 381get_reg_attrs (tree decl, int offset)
a560d4d4
JH
382{
383 reg_attrs attrs;
a560d4d4
JH
384
385 /* If everything is the default, we can just return zero. */
386 if (decl == 0 && offset == 0)
387 return 0;
388
389 attrs.decl = decl;
390 attrs.offset = offset;
391
aebf76a2 392 reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
a560d4d4
JH
393 if (*slot == 0)
394 {
766090c2 395 *slot = ggc_alloc<reg_attrs> ();
a560d4d4
JH
396 memcpy (*slot, &attrs, sizeof (reg_attrs));
397 }
398
aebf76a2 399 return *slot;
a560d4d4
JH
400}
401
6fb5fa3c
DB
402
403#if !HAVE_blockage
adddc347
HPN
404/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
405 and to block register equivalences to be seen across this insn. */
6fb5fa3c
DB
406
407rtx
408gen_blockage (void)
409{
410 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
411 MEM_VOLATILE_P (x) = true;
412 return x;
413}
414#endif
415
416
8deccbb7
RS
417/* Set the mode and register number of X to MODE and REGNO. */
418
419void
420set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
421{
9188b286
RS
422 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
423 ? hard_regno_nregs[regno][mode]
424 : 1);
8deccbb7 425 PUT_MODE_RAW (x, mode);
9188b286 426 set_regno_raw (x, regno, nregs);
8deccbb7
RS
427}
428
08394eef
BS
429/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
430 don't attempt to share with the various global pieces of rtl (such as
431 frame_pointer_rtx). */
432
433rtx
8deccbb7 434gen_raw_REG (machine_mode mode, unsigned int regno)
08394eef 435{
2d44c7de 436 rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
8deccbb7 437 set_mode_and_regno (x, mode, regno);
9fccb335 438 REG_ATTRS (x) = NULL;
08394eef
BS
439 ORIGINAL_REGNO (x) = regno;
440 return x;
441}
442
c5c76735
JL
443/* There are some RTL codes that require special attention; the generation
444 functions do the raw handling. If you add to this list, modify
445 special_rtx in gengenrtl.c as well. */
446
38e60c55 447rtx_expr_list *
ef4bddc2 448gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
38e60c55
DM
449{
450 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
451 expr_list));
452}
453
a756c6be 454rtx_insn_list *
ef4bddc2 455gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
a756c6be
DM
456{
457 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
458 insn_list));
459}
460
d6e1e8b8 461rtx_insn *
ef4bddc2 462gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
d6e1e8b8
DM
463 basic_block bb, rtx pattern, int location, int code,
464 rtx reg_notes)
465{
466 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
467 prev_insn, next_insn,
468 bb, pattern, location, code,
469 reg_notes));
470}
471
3b80f6ca 472rtx
ef4bddc2 473gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca
RH
474{
475 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 476 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
477
478#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
479 if (const_true_rtx && arg == STORE_FLAG_VALUE)
480 return const_true_rtx;
481#endif
482
c13e8210 483 /* Look up the CONST_INT in the hash table. */
aebf76a2
TS
484 rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
485 INSERT);
29105cea 486 if (*slot == 0)
1f8f4a0b 487 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210 488
aebf76a2 489 return *slot;
3b80f6ca
RH
490}
491
2496c7bd 492rtx
ef4bddc2 493gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
2496c7bd
LB
494{
495 return GEN_INT (trunc_int_for_mode (c, mode));
496}
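
/* Illustrative note (editorial): gen_int_mode first truncates C to the
   precision of MODE and sign-extends it back to a HOST_WIDE_INT, so
   e.g. gen_int_mode (255, QImode) returns the shared (const_int -1)
   node rather than (const_int 255).  */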

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
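
/* Editorial aside: because these constants are hash-consed through the
   tables above, building the same CONST_DOUBLE or CONST_FIXED value twice
   yields the same rtx pointer, which is why pointer identity is a valid
   equality test for shared RTL constants.  */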

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
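
/* Editorial example: immed_double_const (-1, -1, VOIDmode) falls under
   case 2 above (I1 is all copies of I0's sign bit), so it returns the
   shared (const_int -1); only values that genuinely need two host words
   allocate a VOIDmode CONST_DOUBLE.  */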

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
770 return rt;
771}
ddef6bc7 772
542a8afa
RH
773/* Generate a memory referring to non-trapping constant memory. */
774
775rtx
ef4bddc2 776gen_const_mem (machine_mode mode, rtx addr)
542a8afa
RH
777{
778 rtx mem = gen_rtx_MEM (mode, addr);
779 MEM_READONLY_P (mem) = 1;
780 MEM_NOTRAP_P (mem) = 1;
781 return mem;
782}
783
bf877a76
R
784/* Generate a MEM referring to fixed portions of the frame, e.g., register
785 save areas. */
786
787rtx
ef4bddc2 788gen_frame_mem (machine_mode mode, rtx addr)
bf877a76
R
789{
790 rtx mem = gen_rtx_MEM (mode, addr);
791 MEM_NOTRAP_P (mem) = 1;
792 set_mem_alias_set (mem, get_frame_alias_set ());
793 return mem;
794}
795
796/* Generate a MEM referring to a temporary use of the stack, not part
797 of the fixed stack frame. For example, something which is pushed
798 by a target splitter. */
799rtx
ef4bddc2 800gen_tmp_stack_mem (machine_mode mode, rtx addr)
bf877a76
R
801{
802 rtx mem = gen_rtx_MEM (mode, addr);
803 MEM_NOTRAP_P (mem) = 1;
e3b5732b 804 if (!cfun->calls_alloca)
bf877a76
R
805 set_mem_alias_set (mem, get_frame_alias_set ());
806 return mem;
807}
808
beb72684
RH
809/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
810 this construct would be valid, and false otherwise. */
811
812bool
ef4bddc2 813validate_subreg (machine_mode omode, machine_mode imode,
ed7a4b4b 814 const_rtx reg, unsigned int offset)
ddef6bc7 815{
beb72684
RH
816 unsigned int isize = GET_MODE_SIZE (imode);
817 unsigned int osize = GET_MODE_SIZE (omode);
818
819 /* All subregs must be aligned. */
820 if (offset % osize != 0)
821 return false;
822
823 /* The subreg offset cannot be outside the inner object. */
824 if (offset >= isize)
825 return false;
826
827 /* ??? This should not be here. Temporarily continue to allow word_mode
828 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
829 Generally, backends are doing something sketchy but it'll take time to
830 fix them all. */
831 if (omode == word_mode)
832 ;
833 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
834 is the culprit here, and not the backends. */
835 else if (osize >= UNITS_PER_WORD && isize >= osize)
836 ;
837 /* Allow component subregs of complex and vector. Though given the below
838 extraction rules, it's not always clear what that means. */
839 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
840 && GET_MODE_INNER (imode) == omode)
841 ;
842 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
843 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
844 represent this. It's questionable if this ought to be represented at
845 all -- why can't this all be hidden in post-reload splitters that make
846 arbitrarily mode changes to the registers themselves. */
847 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
848 ;
849 /* Subregs involving floating point modes are not allowed to
850 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
851 (subreg:SI (reg:DF) 0) isn't. */
852 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
853 {
55a2c322
VM
854 if (! (isize == osize
855 /* LRA can use subreg to store a floating point value in
856 an integer mode. Although the floating point and the
857 integer modes need the same number of hard registers,
858 the size of floating point mode can be less than the
859 integer mode. LRA also uses subregs for a register
860 should be used in different mode in on insn. */
861 || lra_in_progress))
beb72684
RH
862 return false;
863 }
ddef6bc7 864
beb72684
RH
865 /* Paradoxical subregs must have offset zero. */
866 if (osize > isize)
867 return offset == 0;
868
869 /* This is a normal subreg. Verify that the offset is representable. */
870
871 /* For hard registers, we already have most of these rules collected in
872 subreg_offset_representable_p. */
873 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
874 {
875 unsigned int regno = REGNO (reg);
876
877#ifdef CANNOT_CHANGE_MODE_CLASS
878 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
879 && GET_MODE_INNER (imode) == omode)
880 ;
881 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
882 return false;
ddef6bc7 883#endif
beb72684
RH
884
885 return subreg_offset_representable_p (regno, imode, offset, omode);
886 }
887
888 /* For pseudo registers, we want most of the same checks. Namely:
889 If the register no larger than a word, the subreg must be lowpart.
890 If the register is larger than a word, the subreg must be the lowpart
891 of a subword. A subreg does *not* perform arbitrary bit extraction.
892 Given that we've already checked mode/offset alignment, we only have
893 to check subword subregs here. */
55a2c322
VM
894 if (osize < UNITS_PER_WORD
895 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
beb72684 896 {
ef4bddc2 897 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
beb72684
RH
898 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
899 if (offset % UNITS_PER_WORD != low_off)
900 return false;
901 }
902 return true;
903}
904
905rtx
ef4bddc2 906gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
beb72684
RH
907{
908 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 909 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
910}
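
/* Editorial example: with a DImode inner register on a little-endian
   target, gen_rtx_SUBREG (SImode, x, 0) selects the low word and offset 4
   the high word.  A size-changing float subreg such as
   (subreg:SI (reg:DF) 0) is rejected by validate_subreg's float-mode rule
   above (except where SImode is word_mode, or during LRA), so the
   assertion here would trip.  */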

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
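
/* Editorial usage sketch: gen_rtvec (2, set1, set2) builds the
   two-element vector that e.g. gen_rtx_PARALLEL (VOIDmode, ...) wraps
   when a single insn performs two side effects at once.  */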

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
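
/* Editorial example: for SImode within DImode this returns 0 on a
   little-endian target and 4 on a big-endian one; if OUTER_MODE is the
   larger (paradoxical) mode the sign flips, hence the possible negative
   offset mentioned above.  */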
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Make sure m_regno_pointer_align and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
{
  if (outer_bytes > inner_bytes)
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_highpart_offset (unsigned int outer_bytes,
			     unsigned int inner_bytes)
{
  gcc_assert (inner_bytes >= outer_bytes);

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
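
/* Editorial example: with a DImode inner register, (subreg:SI (reg:DI) 0)
   is the lowpart on a little-endian target, while on a big-endian target
   the lowpart is (subreg:SI (reg:DI) 4); subreg_lowpart_p encodes exactly
   this endian-dependent offset check.  */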
6a4bdc79
BS
1542
1543/* Return true if X is a paradoxical subreg, false otherwise. */
1544bool
1545paradoxical_subreg_p (const_rtx x)
1546{
1547 if (GET_CODE (x) != SUBREG)
1548 return false;
1549 return (GET_MODE_PRECISION (GET_MODE (x))
1550 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1551}
23b2ce53 1552\f
ddef6bc7
JJ
1553/* Return subword OFFSET of operand OP.
1554 The word number, OFFSET, is interpreted as the word number starting
1555 at the low-order address. OFFSET 0 is the low-order word if not
1556 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1557
1558 If we cannot extract the required word, we return zero. Otherwise,
1559 an rtx corresponding to the requested word will be returned.
1560
1561 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1562 reload has completed, a valid address will always be returned. After
1563 reload, if a valid address cannot be returned, we return zero.
1564
1565 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1566 it is the responsibility of the caller.
1567
1568 MODE is the mode of OP in case it is a CONST_INT.
1569
1570 ??? This is still rather broken for some cases. The problem for the
1571 moment is that all callers of this thing provide no 'goal mode' to
1572 tell us to work with. This exists because all callers were written
0631e0bf
JH
1573 in a word based SUBREG world.
1574 Now use of this function can be deprecated by simplify_subreg in most
1575 cases.
1576 */
ddef6bc7
JJ
1577
1578rtx
ef4bddc2 1579operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
ddef6bc7
JJ
1580{
1581 if (mode == VOIDmode)
1582 mode = GET_MODE (op);
1583
5b0264cb 1584 gcc_assert (mode != VOIDmode);
ddef6bc7 1585
30f7a378 1586 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1587 if (mode != BLKmode
1588 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1589 return 0;
1590
30f7a378 1591 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1592 if (mode != BLKmode
1593 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1594 return const0_rtx;
1595
ddef6bc7 1596 /* Form a new MEM at the requested address. */
3c0cb5de 1597 if (MEM_P (op))
ddef6bc7 1598 {
60564289 1599 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1600
f1ec5147 1601 if (! validate_address)
60564289 1602 return new_rtx;
f1ec5147
RK
1603
1604 else if (reload_completed)
ddef6bc7 1605 {
09e881c9
BE
1606 if (! strict_memory_address_addr_space_p (word_mode,
1607 XEXP (new_rtx, 0),
1608 MEM_ADDR_SPACE (op)))
f1ec5147 1609 return 0;
ddef6bc7 1610 }
f1ec5147 1611 else
60564289 1612 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
ddef6bc7
JJ
1613 }
1614
0631e0bf
JH
1615 /* Rest can be handled by simplify_subreg. */
1616 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1617}
1618
535a42b1
NS
1619/* Similar to `operand_subword', but never return 0. If we can't
1620 extract the required subword, put OP into a register and try again.
1621 The second attempt must succeed. We always validate the address in
1622 this case.
23b2ce53
RS
1623
1624 MODE is the mode of OP, in case it is CONST_INT. */
1625
1626rtx
ef4bddc2 1627operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
23b2ce53 1628{
ddef6bc7 1629 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1630
1631 if (result)
1632 return result;
1633
1634 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1635 {
 1636 /* If this is a register which cannot be accessed by words, copy it
1637 to a pseudo register. */
f8cfc6aa 1638 if (REG_P (op))
77e6b0eb
JC
1639 op = copy_to_reg (op);
1640 else
1641 op = force_reg (mode, op);
1642 }
23b2ce53 1643
ddef6bc7 1644 result = operand_subword (op, offset, 1, mode);
5b0264cb 1645 gcc_assert (result);
23b2ce53
RS
1646
1647 return result;
1648}
1649\f
2b3493c8
AK
 1650/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
 1651 equal, and 0 otherwise. */
1652
1653int
4f588890 1654mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1655{
1656 if (expr1 == expr2)
1657 return 1;
1658
1659 if (! expr1 || ! expr2)
1660 return 0;
1661
1662 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1663 return 0;
1664
55b34b5f 1665 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1666}
1667
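/* Illustrative sketch, not part of the original source: because the
   comparison is structural (operand_equal_p), two MEMs can agree here
   even when their MEM_EXPR trees are distinct nodes.  The function
   name is hypothetical.  */

static int
example_same_mem_expr (rtx mem1, rtx mem2)
{
  return mem_expr_equal_p (MEM_EXPR (mem1), MEM_EXPR (mem2));
}
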
805903b5
JJ
1668/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1669 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1670 -1 if not known. */
1671
1672int
d9223014 1673get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1674{
1675 tree expr;
1676 unsigned HOST_WIDE_INT offset;
1677
1678 /* This function can't use
527210c4 1679 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1680 || (MAX (MEM_ALIGN (mem),
0eb77834 1681 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1682 < align))
1683 return -1;
1684 else
527210c4 1685 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1686 for two reasons:
1687 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1688 for <variable>. get_inner_reference doesn't handle it and
1689 even if it did, the alignment in that case needs to be determined
1690 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1691 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1692 isn't sufficiently aligned, the object it is in might be. */
1693 gcc_assert (MEM_P (mem));
1694 expr = MEM_EXPR (mem);
527210c4 1695 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1696 return -1;
1697
527210c4 1698 offset = MEM_OFFSET (mem);
805903b5
JJ
1699 if (DECL_P (expr))
1700 {
1701 if (DECL_ALIGN (expr) < align)
1702 return -1;
1703 }
1704 else if (INDIRECT_REF_P (expr))
1705 {
1706 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1707 return -1;
1708 }
1709 else if (TREE_CODE (expr) == COMPONENT_REF)
1710 {
1711 while (1)
1712 {
1713 tree inner = TREE_OPERAND (expr, 0);
1714 tree field = TREE_OPERAND (expr, 1);
1715 tree byte_offset = component_ref_field_offset (expr);
1716 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1717
1718 if (!byte_offset
cc269bb6
RS
1719 || !tree_fits_uhwi_p (byte_offset)
1720 || !tree_fits_uhwi_p (bit_offset))
805903b5
JJ
1721 return -1;
1722
ae7e9ddd
RS
1723 offset += tree_to_uhwi (byte_offset);
1724 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
805903b5
JJ
1725
1726 if (inner == NULL_TREE)
1727 {
1728 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1729 < (unsigned int) align)
1730 return -1;
1731 break;
1732 }
1733 else if (DECL_P (inner))
1734 {
1735 if (DECL_ALIGN (inner) < align)
1736 return -1;
1737 break;
1738 }
1739 else if (TREE_CODE (inner) != COMPONENT_REF)
1740 return -1;
1741 expr = inner;
1742 }
1743 }
1744 else
1745 return -1;
1746
1747 return offset & ((align / BITS_PER_UNIT) - 1);
1748}
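
/* Illustrative sketch, not part of the original source: a result of 0
   means the access is known to start on the requested boundary, here
   32 bits; -1 means the alignment cannot be determined.  The function
   name is hypothetical.  */

static bool
example_known_32bit_aligned (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}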
1749
6926c713 1750/* Given REF (a MEM) and T, either the type of REF or the expression
173b24b9 1751 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1752 if we are making a new object of this type. BITPOS is nonzero if
1753 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1754
1755void
502b8322
AJ
1756set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1757 HOST_WIDE_INT bitpos)
173b24b9 1758{
6f1087be 1759 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1760 tree type;
f12144dd 1761 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1762 addr_space_t as;
173b24b9
RK
1763
1764 /* It can happen that type_for_mode was given a mode for which there
1765 is no language-level type. In which case it returns NULL, which
1766 we can see here. */
1767 if (t == NULL_TREE)
1768 return;
1769
1770 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1771 if (type == error_mark_node)
1772 return;
173b24b9 1773
173b24b9
RK
1774 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1775 wrong answer, as it assumes that DECL_RTL already has the right alias
1776 info. Callers should not set DECL_RTL until after the call to
1777 set_mem_attributes. */
5b0264cb 1778 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1779
f12144dd
RS
1780 memset (&attrs, 0, sizeof (attrs));
1781
738cc472 1782 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1783 front-end routine) and use it. */
f12144dd 1784 attrs.alias = get_alias_set (t);
173b24b9 1785
a5e9c810 1786 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1787 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1788
268f7033 1789 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1790 refattrs = MEM_ATTRS (ref);
1791 if (refattrs)
268f7033
UW
1792 {
1793 /* ??? Can this ever happen? Calling this routine on a MEM that
1794 already carries memory attributes should probably be invalid. */
f12144dd 1795 attrs.expr = refattrs->expr;
754c3d5d 1796 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1797 attrs.offset = refattrs->offset;
754c3d5d 1798 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1799 attrs.size = refattrs->size;
1800 attrs.align = refattrs->align;
268f7033
UW
1801 }
1802
1803 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1804 else
268f7033 1805 {
f12144dd
RS
1806 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1807 gcc_assert (!defattrs->expr);
754c3d5d 1808 gcc_assert (!defattrs->offset_known_p);
f12144dd 1809
268f7033 1810 /* Respect mode size. */
754c3d5d 1811 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1812 attrs.size = defattrs->size;
268f7033
UW
1813 /* ??? Is this really necessary? We probably should always get
1814 the size from the type below. */
1815
1816 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1817 if T is an object, always compute the object alignment below. */
f12144dd
RS
1818 if (TYPE_P (t))
1819 attrs.align = defattrs->align;
1820 else
1821 attrs.align = BITS_PER_UNIT;
268f7033
UW
1822 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1823 e.g. if the type carries an alignment attribute. Should we be
1824 able to simply always use TYPE_ALIGN? */
1825 }
1826
25b75a48
BE
1827 /* We can set the alignment from the type if we are making an object or if
1828 this is an INDIRECT_REF. */
1829 if (objectp || TREE_CODE (t) == INDIRECT_REF)
f12144dd 1830 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1831
738cc472 1832 /* If the size is known, we can set that. */
a787ccc3 1833 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1834
30b0317c
RB
1835 /* The address-space is that of the type. */
1836 as = TYPE_ADDR_SPACE (type);
1837
80965c18
RK
1838 /* If T is not a type, we may be able to deduce some more information about
1839 the expression. */
1840 if (! TYPE_P (t))
8ac61af7 1841 {
8476af98 1842 tree base;
389fdba0 1843
8ac61af7
RK
1844 if (TREE_THIS_VOLATILE (t))
1845 MEM_VOLATILE_P (ref) = 1;
173b24b9 1846
c56e3582
RK
1847 /* Now remove any conversions: they don't change what the underlying
1848 object is. Likewise for SAVE_EXPR. */
1043771b 1849 while (CONVERT_EXPR_P (t)
c56e3582
RK
1850 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1851 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1852 t = TREE_OPERAND (t, 0);
1853
4994da65
RG
1854 /* Note whether this expression can trap. */
1855 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1856
1857 base = get_base_address (t);
f18a7b25
MJ
1858 if (base)
1859 {
1860 if (DECL_P (base)
1861 && TREE_READONLY (base)
1862 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1863 && !TREE_THIS_VOLATILE (base))
1864 MEM_READONLY_P (ref) = 1;
1865
1866 /* Mark static const strings readonly as well. */
1867 if (TREE_CODE (base) == STRING_CST
1868 && TREE_READONLY (base)
1869 && TREE_STATIC (base))
1870 MEM_READONLY_P (ref) = 1;
1871
30b0317c 1872 /* Address-space information is on the base object. */
f18a7b25
MJ
1873 if (TREE_CODE (base) == MEM_REF
1874 || TREE_CODE (base) == TARGET_MEM_REF)
1875 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1876 0))));
1877 else
1878 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1879 }
ba30e50d 1880
2039d7aa
RH
 1881 /* If this expression uses its parent's alias set, mark it such
1882 that we won't change it. */
b4ada065 1883 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
1884 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1885
8ac61af7
RK
1886 /* If this is a decl, set the attributes of the MEM from it. */
1887 if (DECL_P (t))
1888 {
f12144dd 1889 attrs.expr = t;
754c3d5d
RS
1890 attrs.offset_known_p = true;
1891 attrs.offset = 0;
6f1087be 1892 apply_bitpos = bitpos;
a787ccc3 1893 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
1894 }
1895
30b0317c 1896 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1897 else if (CONSTANT_CLASS_P (t))
30b0317c 1898 ;
998d7deb 1899
a787ccc3
RS
1900 /* If this is a field reference, record it. */
1901 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1902 {
f12144dd 1903 attrs.expr = t;
754c3d5d
RS
1904 attrs.offset_known_p = true;
1905 attrs.offset = 0;
6f1087be 1906 apply_bitpos = bitpos;
a787ccc3
RS
1907 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1908 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1909 }
1910
1911 /* If this is an array reference, look for an outer field reference. */
1912 else if (TREE_CODE (t) == ARRAY_REF)
1913 {
1914 tree off_tree = size_zero_node;
1b1838b6
JW
1915 /* We can't modify t, because we use it at the end of the
1916 function. */
1917 tree t2 = t;
998d7deb
RH
1918
1919 do
1920 {
1b1838b6 1921 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1922 tree low_bound = array_ref_low_bound (t2);
1923 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1924
1925 /* We assume all arrays have sizes that are a multiple of a byte.
1926 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1927 index, then convert to sizetype and multiply by the size of
1928 the array element. */
1929 if (! integer_zerop (low_bound))
4845b383
KH
1930 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1931 index, low_bound);
2567406a 1932
44de5aeb 1933 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1934 size_binop (MULT_EXPR,
1935 fold_convert (sizetype,
1936 index),
44de5aeb
RK
1937 unit_size),
1938 off_tree);
1b1838b6 1939 t2 = TREE_OPERAND (t2, 0);
998d7deb 1940 }
1b1838b6 1941 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1942
30b0317c
RB
1943 if (DECL_P (t2)
1944 || TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1945 {
f12144dd 1946 attrs.expr = t2;
754c3d5d 1947 attrs.offset_known_p = false;
cc269bb6 1948 if (tree_fits_uhwi_p (off_tree))
6f1087be 1949 {
754c3d5d 1950 attrs.offset_known_p = true;
ae7e9ddd 1951 attrs.offset = tree_to_uhwi (off_tree);
6f1087be
RH
1952 apply_bitpos = bitpos;
1953 }
998d7deb 1954 }
30b0317c 1955 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
1956 }
1957
56c47f22 1958 /* If this is an indirect reference, record it. */
70f34814 1959 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1960 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1961 {
f12144dd 1962 attrs.expr = t;
754c3d5d
RS
1963 attrs.offset_known_p = true;
1964 attrs.offset = 0;
56c47f22
RG
1965 apply_bitpos = bitpos;
1966 }
1967
30b0317c
RB
1968 /* Compute the alignment. */
1969 unsigned int obj_align;
1970 unsigned HOST_WIDE_INT obj_bitpos;
1971 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1972 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1973 if (obj_bitpos != 0)
146ec50f 1974 obj_align = least_bit_hwi (obj_bitpos);
30b0317c 1975 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
1976 }
1977
cc269bb6 1978 if (tree_fits_uhwi_p (new_size))
a787ccc3
RS
1979 {
1980 attrs.size_known_p = true;
ae7e9ddd 1981 attrs.size = tree_to_uhwi (new_size);
a787ccc3
RS
1982 }
1983
15c812e3 1984 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1985 bit position offset. Similarly, increase the size of the accessed
1986 object to contain the negative offset. */
6f1087be 1987 if (apply_bitpos)
8c317c5f 1988 {
754c3d5d
RS
1989 gcc_assert (attrs.offset_known_p);
1990 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1991 if (attrs.size_known_p)
1992 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1993 }
6f1087be 1994
8ac61af7 1995 /* Now set the attributes we computed above. */
f18a7b25 1996 attrs.addrspace = as;
f12144dd 1997 set_mem_attrs (ref, &attrs);
173b24b9
RK
1998}
1999
6f1087be 2000void
502b8322 2001set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
2002{
2003 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2004}
2005
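/* Illustrative sketch, not part of the original source: a MEM built by
   hand for a declaration is normally tagged immediately so the alias
   machinery can see the tree.  This assumes DECL_RTL (decl) is a MEM;
   the function name is hypothetical.  */

static rtx
example_mem_for_decl (tree decl)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			 XEXP (DECL_RTL (decl), 0));
  set_mem_attributes (mem, decl, 1);
  return mem;
}
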
173b24b9
RK
2006/* Set the alias set of MEM to SET. */
2007
2008void
4862826d 2009set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 2010{
f12144dd
RS
2011 struct mem_attrs attrs;
2012
173b24b9 2013 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 2014 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
2015 attrs = *get_mem_attrs (mem);
2016 attrs.alias = set;
2017 set_mem_attrs (mem, &attrs);
09e881c9
BE
2018}
2019
2020/* Set the address space of MEM to ADDRSPACE (target-defined). */
2021
2022void
2023set_mem_addr_space (rtx mem, addr_space_t addrspace)
2024{
f12144dd
RS
2025 struct mem_attrs attrs;
2026
2027 attrs = *get_mem_attrs (mem);
2028 attrs.addrspace = addrspace;
2029 set_mem_attrs (mem, &attrs);
173b24b9 2030}
738cc472 2031
d022d93e 2032/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
2033
2034void
502b8322 2035set_mem_align (rtx mem, unsigned int align)
738cc472 2036{
f12144dd
RS
2037 struct mem_attrs attrs;
2038
2039 attrs = *get_mem_attrs (mem);
2040 attrs.align = align;
2041 set_mem_attrs (mem, &attrs);
738cc472 2042}
1285011e 2043
998d7deb 2044/* Set the expr for MEM to EXPR. */
1285011e
RK
2045
2046void
502b8322 2047set_mem_expr (rtx mem, tree expr)
1285011e 2048{
f12144dd
RS
2049 struct mem_attrs attrs;
2050
2051 attrs = *get_mem_attrs (mem);
2052 attrs.expr = expr;
2053 set_mem_attrs (mem, &attrs);
1285011e 2054}
998d7deb
RH
2055
2056/* Set the offset of MEM to OFFSET. */
2057
2058void
527210c4 2059set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 2060{
f12144dd
RS
2061 struct mem_attrs attrs;
2062
2063 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2064 attrs.offset_known_p = true;
2065 attrs.offset = offset;
527210c4
RS
2066 set_mem_attrs (mem, &attrs);
2067}
2068
2069/* Clear the offset of MEM. */
2070
2071void
2072clear_mem_offset (rtx mem)
2073{
2074 struct mem_attrs attrs;
2075
2076 attrs = *get_mem_attrs (mem);
754c3d5d 2077 attrs.offset_known_p = false;
f12144dd 2078 set_mem_attrs (mem, &attrs);
35aff10b
AM
2079}
2080
2081/* Set the size of MEM to SIZE. */
2082
2083void
f5541398 2084set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 2085{
f12144dd
RS
2086 struct mem_attrs attrs;
2087
2088 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2089 attrs.size_known_p = true;
2090 attrs.size = size;
f5541398
RS
2091 set_mem_attrs (mem, &attrs);
2092}
2093
2094/* Clear the size of MEM. */
2095
2096void
2097clear_mem_size (rtx mem)
2098{
2099 struct mem_attrs attrs;
2100
2101 attrs = *get_mem_attrs (mem);
754c3d5d 2102 attrs.size_known_p = false;
f12144dd 2103 set_mem_attrs (mem, &attrs);
998d7deb 2104}
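
/* Illustrative sketch, not part of the original source: each setter
   above copies the current mem_attrs, updates one field and installs
   the result, so retagging an access as 8 bytes at 64-bit alignment
   is just two calls.  The function name is hypothetical.  */

static void
example_retag_access (rtx mem)
{
  set_mem_size (mem, 8);
  set_mem_align (mem, 64);
}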
173b24b9 2105\f
738cc472
RK
2106/* Return a memory reference like MEMREF, but with its mode changed to MODE
2107 and its address changed to ADDR. (VOIDmode means don't change the mode.
2108 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
23b33725
RS
2109 returned memory location is required to be valid. INPLACE is true if any
2110 changes can be made directly to MEMREF or false if MEMREF must be treated
2111 as immutable.
2112
2113 The memory attributes are not changed. */
23b2ce53 2114
738cc472 2115static rtx
ef4bddc2 2116change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
23b33725 2117 bool inplace)
23b2ce53 2118{
09e881c9 2119 addr_space_t as;
60564289 2120 rtx new_rtx;
23b2ce53 2121
5b0264cb 2122 gcc_assert (MEM_P (memref));
09e881c9 2123 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
2124 if (mode == VOIDmode)
2125 mode = GET_MODE (memref);
2126 if (addr == 0)
2127 addr = XEXP (memref, 0);
a74ff877 2128 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2129 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2130 return memref;
23b2ce53 2131
91c5ee5b
VM
2132 /* Don't validate address for LRA. LRA can make the address valid
 2133 by itself in the most efficient way. */
2134 if (validate && !lra_in_progress)
23b2ce53 2135 {
f1ec5147 2136 if (reload_in_progress || reload_completed)
09e881c9 2137 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2138 else
09e881c9 2139 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2140 }
750c9258 2141
9b04c6a8
RK
2142 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2143 return memref;
2144
23b33725
RS
2145 if (inplace)
2146 {
2147 XEXP (memref, 0) = addr;
2148 return memref;
2149 }
2150
60564289
KG
2151 new_rtx = gen_rtx_MEM (mode, addr);
2152 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2153 return new_rtx;
23b2ce53 2154}
792760b9 2155
738cc472
RK
2156/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2157 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2158
2159rtx
ef4bddc2 2160change_address (rtx memref, machine_mode mode, rtx addr)
f4ef873c 2161{
23b33725 2162 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
ef4bddc2 2163 machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2164 struct mem_attrs attrs, *defattrs;
4e44c1ef 2165
f12144dd
RS
2166 attrs = *get_mem_attrs (memref);
2167 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2168 attrs.expr = NULL_TREE;
2169 attrs.offset_known_p = false;
2170 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2171 attrs.size = defattrs->size;
2172 attrs.align = defattrs->align;
c2f7bcc3 2173
fdb1c7b3 2174 /* If there are no changes, just return the original memory reference. */
60564289 2175 if (new_rtx == memref)
4e44c1ef 2176 {
f12144dd 2177 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2178 return new_rtx;
4e44c1ef 2179
60564289
KG
2180 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2181 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2182 }
fdb1c7b3 2183
f12144dd 2184 set_mem_attrs (new_rtx, &attrs);
60564289 2185 return new_rtx;
f4ef873c 2186}
792760b9 2187
738cc472
RK
2188/* Return a memory reference like MEMREF, but with its mode changed
2189 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2190 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2191 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2192 and the caller is responsible for adjusting MEMREF base register.
2193 If ADJUST_OBJECT is zero, the underlying object associated with the
2194 memory reference is left unchanged and the caller is responsible for
2195 dealing with it. Otherwise, if the new memory reference is outside
5f2cbd0d
RS
2196 the underlying object, even partially, then the object is dropped.
2197 SIZE, if nonzero, is the size of an access in cases where MODE
2198 has no inherent size. */
f1ec5147
RK
2199
2200rtx
ef4bddc2 2201adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
5f2cbd0d
RS
2202 int validate, int adjust_address, int adjust_object,
2203 HOST_WIDE_INT size)
f1ec5147 2204{
823e3574 2205 rtx addr = XEXP (memref, 0);
60564289 2206 rtx new_rtx;
ef4bddc2 2207 machine_mode address_mode;
a6fe9ed4 2208 int pbits;
0207fa90 2209 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2210 unsigned HOST_WIDE_INT max_align;
0207fa90 2211#ifdef POINTERS_EXTEND_UNSIGNED
ef4bddc2 2212 machine_mode pointer_mode
0207fa90
EB
2213 = targetm.addr_space.pointer_mode (attrs.addrspace);
2214#endif
823e3574 2215
ee88e690
EB
2216 /* VOIDmode means no mode change for change_address_1. */
2217 if (mode == VOIDmode)
2218 mode = GET_MODE (memref);
2219
5f2cbd0d
RS
2220 /* Take the size of non-BLKmode accesses from the mode. */
2221 defattrs = mode_mem_attrs[(int) mode];
2222 if (defattrs->size_known_p)
2223 size = defattrs->size;
2224
fdb1c7b3
JH
2225 /* If there are no changes, just return the original memory reference. */
2226 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2227 && (size == 0 || (attrs.size_known_p && attrs.size == size))
f12144dd
RS
2228 && (!validate || memory_address_addr_space_p (mode, addr,
2229 attrs.addrspace)))
fdb1c7b3
JH
2230 return memref;
2231
d14419e4 2232 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2233 This may happen even if offset is nonzero -- consider
d14419e4
RH
2234 (plus (plus reg reg) const_int) -- so do this always. */
2235 addr = copy_rtx (addr);
2236
a6fe9ed4
JM
2237 /* Convert a possibly large offset to a signed value within the
2238 range of the target address space. */
372d6395 2239 address_mode = get_address_mode (memref);
d4ebfa65 2240 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2241 if (HOST_BITS_PER_WIDE_INT > pbits)
2242 {
2243 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2244 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2245 >> shift);
2246 }
2247
5ef0b50d 2248 if (adjust_address)
4a78c787
RH
2249 {
2250 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2251 object, we can merge it into the LO_SUM. */
2252 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2253 && offset >= 0
2254 && (unsigned HOST_WIDE_INT) offset
2255 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2256 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2257 plus_constant (address_mode,
2258 XEXP (addr, 1), offset));
0207fa90
EB
2259#ifdef POINTERS_EXTEND_UNSIGNED
2260 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2261 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2262 the fact that pointers are not allowed to overflow. */
2263 else if (POINTERS_EXTEND_UNSIGNED > 0
2264 && GET_CODE (addr) == ZERO_EXTEND
2265 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2266 && trunc_int_for_mode (offset, pointer_mode) == offset)
2267 addr = gen_rtx_ZERO_EXTEND (address_mode,
2268 plus_constant (pointer_mode,
2269 XEXP (addr, 0), offset));
2270#endif
4a78c787 2271 else
0a81f074 2272 addr = plus_constant (address_mode, addr, offset);
4a78c787 2273 }
823e3574 2274
23b33725 2275 new_rtx = change_address_1 (memref, mode, addr, validate, false);
738cc472 2276
09efeca1
PB
2277 /* If the address is a REG, change_address_1 rightfully returns memref,
2278 but this would destroy memref's MEM_ATTRS. */
2279 if (new_rtx == memref && offset != 0)
2280 new_rtx = copy_rtx (new_rtx);
2281
5ef0b50d
EB
2282 /* Conservatively drop the object if we don't know where we start from. */
2283 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2284 {
2285 attrs.expr = NULL_TREE;
2286 attrs.alias = 0;
2287 }
2288
738cc472
RK
2289 /* Compute the new values of the memory attributes due to this adjustment.
2290 We add the offsets and update the alignment. */
754c3d5d 2291 if (attrs.offset_known_p)
5ef0b50d
EB
2292 {
2293 attrs.offset += offset;
2294
2295 /* Drop the object if the new left end is not within its bounds. */
2296 if (adjust_object && attrs.offset < 0)
2297 {
2298 attrs.expr = NULL_TREE;
2299 attrs.alias = 0;
2300 }
2301 }
738cc472 2302
03bf2c23
RK
2303 /* Compute the new alignment by taking the MIN of the alignment and the
 2304 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2305 is zero. */
2306 if (offset != 0)
f12144dd 2307 {
146ec50f 2308 max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
f12144dd
RS
2309 attrs.align = MIN (attrs.align, max_align);
2310 }
738cc472 2311
5f2cbd0d 2312 if (size)
754c3d5d 2313 {
5ef0b50d 2314 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2315 if (adjust_object && (offset + size) > attrs.size)
5ef0b50d
EB
2316 {
2317 attrs.expr = NULL_TREE;
2318 attrs.alias = 0;
2319 }
754c3d5d 2320 attrs.size_known_p = true;
5f2cbd0d 2321 attrs.size = size;
754c3d5d
RS
2322 }
2323 else if (attrs.size_known_p)
5ef0b50d 2324 {
5f2cbd0d 2325 gcc_assert (!adjust_object);
5ef0b50d 2326 attrs.size -= offset;
5f2cbd0d
RS
2327 /* ??? The store_by_pieces machinery generates negative sizes,
2328 so don't assert for that here. */
5ef0b50d 2329 }
10b76d73 2330
f12144dd 2331 set_mem_attrs (new_rtx, &attrs);
738cc472 2332
60564289 2333 return new_rtx;
f1ec5147
RK
2334}
2335
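/* Illustrative sketch, not part of the original source: callers
   normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv macros; e.g. viewing the second word of a
   double-word MEM in word_mode without validating the new address.
   The function name is hypothetical.  */

static rtx
example_second_word (rtx mem)
{
  return adjust_address_nv (mem, word_mode, UNITS_PER_WORD);
}
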
630036c6
JJ
2336/* Return a memory reference like MEMREF, but with its mode changed
2337 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2338 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2339 nonzero, the memory address is forced to be valid. */
2340
2341rtx
ef4bddc2 2342adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
502b8322 2343 HOST_WIDE_INT offset, int validate)
630036c6 2344{
23b33725 2345 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
5f2cbd0d 2346 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
630036c6
JJ
2347}
2348
8ac61af7
RK
2349/* Return a memory reference like MEMREF, but whose address is changed by
2350 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2351 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2352
2353rtx
502b8322 2354offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2355{
60564289 2356 rtx new_rtx, addr = XEXP (memref, 0);
ef4bddc2 2357 machine_mode address_mode;
754c3d5d 2358 struct mem_attrs attrs, *defattrs;
e3c8ea67 2359
f12144dd 2360 attrs = *get_mem_attrs (memref);
372d6395 2361 address_mode = get_address_mode (memref);
d4ebfa65 2362 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2363
68252e27 2364 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2365 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2366
2367 However, if we did go and rearrange things, we can wind up not
2368 being able to recognize the magic around pic_offset_table_rtx.
2369 This stuff is fragile, and is yet another example of why it is
2370 bad to expose PIC machinery too early. */
f12144dd
RS
2371 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2372 attrs.addrspace)
e3c8ea67
RH
2373 && GET_CODE (addr) == PLUS
2374 && XEXP (addr, 0) == pic_offset_table_rtx)
2375 {
2376 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2377 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2378 }
2379
60564289 2380 update_temp_slot_address (XEXP (memref, 0), new_rtx);
23b33725 2381 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
0d4903b8 2382
fdb1c7b3 2383 /* If there are no changes, just return the original memory reference. */
60564289
KG
2384 if (new_rtx == memref)
2385 return new_rtx;
fdb1c7b3 2386
0d4903b8
RK
2387 /* Update the alignment to reflect the offset. Reset the offset, which
2388 we don't know. */
754c3d5d
RS
2389 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2390 attrs.offset_known_p = false;
2391 attrs.size_known_p = defattrs->size_known_p;
2392 attrs.size = defattrs->size;
f12144dd
RS
2393 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2394 set_mem_attrs (new_rtx, &attrs);
60564289 2395 return new_rtx;
0d4903b8 2396}
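
/* Illustrative sketch, not part of the original source: adding a
   run-time byte index known to be a multiple of 4; the POW2 argument
   lets a 32-bit alignment survive the variable offset.  The function
   name is hypothetical.  */

static rtx
example_indexed_mem (rtx mem, rtx byte_index)
{
  return offset_address (mem, byte_index, 4);
}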
68252e27 2397
792760b9
RK
2398/* Return a memory reference like MEMREF, but with its address changed to
2399 ADDR. The caller is asserting that the actual piece of memory pointed
2400 to is the same, just the form of the address is being changed, such as
23b33725
RS
2401 by putting something into a register. INPLACE is true if any changes
2402 can be made directly to MEMREF or false if MEMREF must be treated as
2403 immutable. */
792760b9
RK
2404
2405rtx
23b33725 2406replace_equiv_address (rtx memref, rtx addr, bool inplace)
792760b9 2407{
738cc472
RK
2408 /* change_address_1 copies the memory attribute structure without change
2409 and that's exactly what we want here. */
40c0668b 2410 update_temp_slot_address (XEXP (memref, 0), addr);
23b33725 2411 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
792760b9 2412}
738cc472 2413
f1ec5147
RK
2414/* Likewise, but the reference is not required to be valid. */
2415
2416rtx
23b33725 2417replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
f1ec5147 2418{
23b33725 2419 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
f1ec5147 2420}
e7dfe4bb
RH
2421
2422/* Return a memory reference like MEMREF, but with its mode widened to
2423 MODE and offset by OFFSET. This would be used by targets that e.g.
2424 cannot issue QImode memory operations and have to use SImode memory
2425 operations plus masking logic. */
2426
2427rtx
ef4bddc2 2428widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2429{
5f2cbd0d 2430 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2431 struct mem_attrs attrs;
e7dfe4bb
RH
2432 unsigned int size = GET_MODE_SIZE (mode);
2433
fdb1c7b3 2434 /* If there are no changes, just return the original memory reference. */
60564289
KG
2435 if (new_rtx == memref)
2436 return new_rtx;
fdb1c7b3 2437
f12144dd
RS
2438 attrs = *get_mem_attrs (new_rtx);
2439
e7dfe4bb
RH
2440 /* If we don't know what offset we were at within the expression, then
2441 we can't know if we've overstepped the bounds. */
754c3d5d 2442 if (! attrs.offset_known_p)
f12144dd 2443 attrs.expr = NULL_TREE;
e7dfe4bb 2444
f12144dd 2445 while (attrs.expr)
e7dfe4bb 2446 {
f12144dd 2447 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2448 {
f12144dd
RS
2449 tree field = TREE_OPERAND (attrs.expr, 1);
2450 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2451
2452 if (! DECL_SIZE_UNIT (field))
2453 {
f12144dd 2454 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2455 break;
2456 }
2457
2458 /* Is the field at least as large as the access? If so, ok,
2459 otherwise strip back to the containing structure. */
03667700
RK
2460 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2461 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2462 && attrs.offset >= 0)
e7dfe4bb
RH
2463 break;
2464
cc269bb6 2465 if (! tree_fits_uhwi_p (offset))
e7dfe4bb 2466 {
f12144dd 2467 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2468 break;
2469 }
2470
f12144dd 2471 attrs.expr = TREE_OPERAND (attrs.expr, 0);
ae7e9ddd
RS
2472 attrs.offset += tree_to_uhwi (offset);
2473 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
754c3d5d 2474 / BITS_PER_UNIT);
e7dfe4bb
RH
2475 }
2476 /* Similarly for the decl. */
f12144dd
RS
2477 else if (DECL_P (attrs.expr)
2478 && DECL_SIZE_UNIT (attrs.expr)
2479 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2480 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2481 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2482 break;
2483 else
2484 {
2485 /* The widened memory access overflows the expression, which means
2486 that it could alias another expression. Zap it. */
f12144dd 2487 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2488 break;
2489 }
2490 }
2491
f12144dd 2492 if (! attrs.expr)
754c3d5d 2493 attrs.offset_known_p = false;
e7dfe4bb
RH
2494
2495 /* The widened memory may alias other stuff, so zap the alias set. */
2496 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2497 attrs.alias = 0;
754c3d5d
RS
2498 attrs.size_known_p = true;
2499 attrs.size = size;
f12144dd 2500 set_mem_attrs (new_rtx, &attrs);
60564289 2501 return new_rtx;
e7dfe4bb 2502}
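
/* Illustrative sketch, not part of the original source: a target
   without byte stores could widen a QImode access to SImode and mask,
   accepting the weaker attributes computed above.  The function name
   is hypothetical.  */

static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}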
23b2ce53 2503\f
f6129d66
RH
2504/* A fake decl that is used as the MEM_EXPR of spill slots. */
2505static GTY(()) tree spill_slot_decl;
2506
3d7e23f6
RH
2507tree
2508get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2509{
2510 tree d = spill_slot_decl;
2511 rtx rd;
f12144dd 2512 struct mem_attrs attrs;
f6129d66 2513
3d7e23f6 2514 if (d || !force_build_p)
f6129d66
RH
2515 return d;
2516
c2255bc4
AH
2517 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2518 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2519 DECL_ARTIFICIAL (d) = 1;
2520 DECL_IGNORED_P (d) = 1;
2521 TREE_USED (d) = 1;
f6129d66
RH
2522 spill_slot_decl = d;
2523
2524 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2525 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2526 attrs = *mode_mem_attrs[(int) BLKmode];
2527 attrs.alias = new_alias_set ();
2528 attrs.expr = d;
2529 set_mem_attrs (rd, &attrs);
f6129d66
RH
2530 SET_DECL_RTL (d, rd);
2531
2532 return d;
2533}
2534
2535/* Given MEM, a result from assign_stack_local, fill in the memory
2536 attributes as appropriate for a register allocator spill slot.
2537 These slots are not aliasable by other memory. We arrange for
2538 them all to use a single MEM_EXPR, so that the aliasing code can
2539 work properly in the case of shared spill slots. */
2540
2541void
2542set_mem_attrs_for_spill (rtx mem)
2543{
f12144dd
RS
2544 struct mem_attrs attrs;
2545 rtx addr;
f6129d66 2546
f12144dd
RS
2547 attrs = *get_mem_attrs (mem);
2548 attrs.expr = get_spill_slot_decl (true);
2549 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2550 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2551
2552 /* We expect the incoming memory to be of the form:
2553 (mem:MODE (plus (reg sfp) (const_int offset)))
2554 with perhaps the plus missing for offset = 0. */
2555 addr = XEXP (mem, 0);
754c3d5d
RS
2556 attrs.offset_known_p = true;
2557 attrs.offset = 0;
f6129d66 2558 if (GET_CODE (addr) == PLUS
481683e1 2559 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2560 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2561
f12144dd 2562 set_mem_attrs (mem, &attrs);
f6129d66
RH
2563 MEM_NOTRAP_P (mem) = 1;
2564}
2565\f
23b2ce53
RS
2566/* Return a newly created CODE_LABEL rtx with a unique label number. */
2567
7dcc3ab5 2568rtx_code_label *
502b8322 2569gen_label_rtx (void)
23b2ce53 2570{
7dcc3ab5
DM
2571 return as_a <rtx_code_label *> (
2572 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2573 NULL, label_num++, NULL));
23b2ce53
RS
2574}
2575\f
2576/* For procedure integration. */
2577
23b2ce53 2578/* Install new pointers to the first and last insns in the chain.
86fe05e0 2579 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2580 Used for an inline-procedure after copying the insn chain. */
2581
2582void
fee3e72c 2583set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
23b2ce53 2584{
fee3e72c 2585 rtx_insn *insn;
86fe05e0 2586
5936d944
JH
2587 set_first_insn (first);
2588 set_last_insn (last);
86fe05e0
RK
2589 cur_insn_uid = 0;
2590
b5b8b0ac
AO
2591 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2592 {
2593 int debug_count = 0;
2594
2595 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2596 cur_debug_insn_uid = 0;
2597
2598 for (insn = first; insn; insn = NEXT_INSN (insn))
2599 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2600 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2601 else
2602 {
2603 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2604 if (DEBUG_INSN_P (insn))
2605 debug_count++;
2606 }
2607
2608 if (debug_count)
2609 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2610 else
2611 cur_debug_insn_uid++;
2612 }
2613 else
2614 for (insn = first; insn; insn = NEXT_INSN (insn))
2615 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2616
2617 cur_insn_uid++;
23b2ce53 2618}
23b2ce53 2619\f
750c9258 2620/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2621 structure. This routine should only be called once. */
23b2ce53 2622
fd743bc1 2623static void
6bb9bf63 2624unshare_all_rtl_1 (rtx_insn *insn)
23b2ce53 2625{
d1b81779 2626 /* Unshare just about everything else. */
2c07f13b 2627 unshare_all_rtl_in_chain (insn);
750c9258 2628
23b2ce53
RS
2629 /* Make sure the addresses of stack slots found outside the insn chain
2630 (such as, in DECL_RTL of a variable) are not shared
2631 with the insn chain.
2632
2633 This special care is necessary when the stack slot MEM does not
2634 actually appear in the insn chain. If it does appear, its address
2635 is unshared from all else at that point. */
8c39f8ae
TS
2636 unsigned int i;
2637 rtx temp;
2638 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2639 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
23b2ce53
RS
2640}
2641
750c9258 2642/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2643 structure, again. This is a fairly expensive thing to do so it
2644 should be done sparingly. */
2645
2646void
6bb9bf63 2647unshare_all_rtl_again (rtx_insn *insn)
d1b81779 2648{
6bb9bf63 2649 rtx_insn *p;
624c87aa
RE
2650 tree decl;
2651
d1b81779 2652 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2653 if (INSN_P (p))
d1b81779
GK
2654 {
2655 reset_used_flags (PATTERN (p));
2656 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2657 if (CALL_P (p))
2658 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2659 }
624c87aa 2660
2d4aecb3 2661 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2662 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2663
624c87aa 2664 /* Make sure that virtual parameters are not shared. */
910ad8de 2665 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2666 set_used_flags (DECL_RTL (decl));
624c87aa 2667
8c39f8ae
TS
2668 rtx temp;
2669 unsigned int i;
2670 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2671 reset_used_flags (temp);
624c87aa 2672
b4aaa77b 2673 unshare_all_rtl_1 (insn);
fd743bc1
PB
2674}
2675
c2924966 2676unsigned int
fd743bc1
PB
2677unshare_all_rtl (void)
2678{
b4aaa77b 2679 unshare_all_rtl_1 (get_insns ());
60ebe8ce
JJ
2680
2681 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2682 {
2683 if (DECL_RTL_SET_P (decl))
2684 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2685 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2686 }
2687
c2924966 2688 return 0;
d1b81779
GK
2689}
2690
ef330312 2691
2c07f13b
JH
2692/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2693 Recursively does the same for subexpressions. */
2694
2695static void
2696verify_rtx_sharing (rtx orig, rtx insn)
2697{
2698 rtx x = orig;
2699 int i;
2700 enum rtx_code code;
2701 const char *format_ptr;
2702
2703 if (x == 0)
2704 return;
2705
2706 code = GET_CODE (x);
2707
2708 /* These types may be freely shared. */
2709
2710 switch (code)
2711 {
2712 case REG:
0ca5af51
AO
2713 case DEBUG_EXPR:
2714 case VALUE:
d8116890 2715 CASE_CONST_ANY:
2c07f13b
JH
2716 case SYMBOL_REF:
2717 case LABEL_REF:
2718 case CODE_LABEL:
2719 case PC:
2720 case CC0:
3810076b 2721 case RETURN:
26898771 2722 case SIMPLE_RETURN:
2c07f13b 2723 case SCRATCH:
3e89ed8d 2724 /* SCRATCHes must be shared because they represent distinct values. */
c5c5ba89 2725 return;
3e89ed8d 2726 case CLOBBER:
c5c5ba89
JH
2727 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2728 clobbers or clobbers of hard registers that originated as pseudos.
2729 This is needed to allow safe register renaming. */
d7ae3739
EB
2730 if (REG_P (XEXP (x, 0))
2731 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2732 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3e89ed8d
JH
2733 return;
2734 break;
2c07f13b
JH
2735
2736 case CONST:
6fb5fa3c 2737 if (shared_const_p (orig))
2c07f13b
JH
2738 return;
2739 break;
2740
2741 case MEM:
2742 /* A MEM is allowed to be shared if its address is constant. */
2743 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2744 || reload_completed || reload_in_progress)
2745 return;
2746
2747 break;
2748
2749 default:
2750 break;
2751 }
2752
2753 /* This rtx may not be shared. If it has already been seen,
2754 replace it with a copy of itself. */
b2b29377 2755 if (flag_checking && RTX_FLAG (x, used))
2c07f13b 2756 {
ab532386 2757 error ("invalid rtl sharing found in the insn");
2c07f13b 2758 debug_rtx (insn);
ab532386 2759 error ("shared rtx");
2c07f13b 2760 debug_rtx (x);
ab532386 2761 internal_error ("internal consistency failure");
2c07f13b 2762 }
1a2caa7a 2763 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2764
2c07f13b
JH
2765 RTX_FLAG (x, used) = 1;
2766
6614fd40 2767 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2768
2769 format_ptr = GET_RTX_FORMAT (code);
2770
2771 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2772 {
2773 switch (*format_ptr++)
2774 {
2775 case 'e':
2776 verify_rtx_sharing (XEXP (x, i), insn);
2777 break;
2778
2779 case 'E':
2780 if (XVEC (x, i) != NULL)
2781 {
2782 int j;
2783 int len = XVECLEN (x, i);
2784
2785 for (j = 0; j < len; j++)
2786 {
1a2caa7a
NS
2787 /* We allow sharing of ASM_OPERANDS inside single
2788 instruction. */
2c07f13b 2789 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2790 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2791 == ASM_OPERANDS))
2c07f13b
JH
2792 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2793 else
2794 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2795 }
2796 }
2797 break;
2798 }
2799 }
2800 return;
2801}
2802
0e0f87d4
SB
2803/* Reset used-flags for INSN. */
2804
2805static void
2806reset_insn_used_flags (rtx insn)
2807{
2808 gcc_assert (INSN_P (insn));
2809 reset_used_flags (PATTERN (insn));
2810 reset_used_flags (REG_NOTES (insn));
2811 if (CALL_P (insn))
2812 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2813}
2814
a24243a0 2815/* Go through all the RTL insn bodies and clear all the USED bits. */
2c07f13b 2816
a24243a0
AK
2817static void
2818reset_all_used_flags (void)
2c07f13b 2819{
dc01c3d1 2820 rtx_insn *p;
2c07f13b
JH
2821
2822 for (p = get_insns (); p; p = NEXT_INSN (p))
2823 if (INSN_P (p))
2824 {
0e0f87d4
SB
2825 rtx pat = PATTERN (p);
2826 if (GET_CODE (pat) != SEQUENCE)
2827 reset_insn_used_flags (p);
2828 else
2954a813 2829 {
0e0f87d4
SB
2830 gcc_assert (REG_NOTES (p) == NULL);
2831 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2832 {
2833 rtx insn = XVECEXP (pat, 0, i);
2834 if (INSN_P (insn))
2835 reset_insn_used_flags (insn);
2836 }
2954a813 2837 }
2c07f13b 2838 }
a24243a0
AK
2839}
2840
0e0f87d4
SB
2841/* Verify sharing in INSN. */
2842
2843static void
2844verify_insn_sharing (rtx insn)
2845{
2846 gcc_assert (INSN_P (insn));
4b498f72
JJ
2847 verify_rtx_sharing (PATTERN (insn), insn);
2848 verify_rtx_sharing (REG_NOTES (insn), insn);
0e0f87d4 2849 if (CALL_P (insn))
4b498f72 2850 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
0e0f87d4
SB
2851}
2852
a24243a0
AK
2853/* Go through all the RTL insn bodies and check that there is no unexpected
 2854 sharing between the subexpressions. */
2855
2856DEBUG_FUNCTION void
2857verify_rtl_sharing (void)
2858{
dc01c3d1 2859 rtx_insn *p;
a24243a0
AK
2860
2861 timevar_push (TV_VERIFY_RTL_SHARING);
2862
2863 reset_all_used_flags ();
2c07f13b
JH
2864
2865 for (p = get_insns (); p; p = NEXT_INSN (p))
2866 if (INSN_P (p))
2867 {
0e0f87d4
SB
2868 rtx pat = PATTERN (p);
2869 if (GET_CODE (pat) != SEQUENCE)
2870 verify_insn_sharing (p);
2871 else
2872 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2873 {
2874 rtx insn = XVECEXP (pat, 0, i);
2875 if (INSN_P (insn))
2876 verify_insn_sharing (insn);
2877 }
2c07f13b 2878 }
a222c01a 2879
a24243a0
AK
2880 reset_all_used_flags ();
2881
a222c01a 2882 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2883}
2884
d1b81779
GK
2885/* Go through all the RTL insn bodies and copy any invalid shared structure.
2886 Assumes the mark bits are cleared at entry. */
2887
2c07f13b 2888void
dc01c3d1 2889unshare_all_rtl_in_chain (rtx_insn *insn)
d1b81779
GK
2890{
2891 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2892 if (INSN_P (insn))
d1b81779
GK
2893 {
2894 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2895 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2896 if (CALL_P (insn))
2897 CALL_INSN_FUNCTION_USAGE (insn)
2898 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2899 }
2900}
2901
2d4aecb3 2902/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2903 shared. We never replace the DECL_RTLs themselves with a copy,
 2904 but expressions mentioned in a DECL_RTL cannot be shared with
2905 expressions in the instruction stream.
2906
2907 Note that reload may convert pseudo registers into memories in-place.
2908 Pseudo registers are always shared, but MEMs never are. Thus if we
2909 reset the used flags on MEMs in the instruction stream, we must set
2910 them again on MEMs that appear in DECL_RTLs. */
2911
2d4aecb3 2912static void
5eb2a9f2 2913set_used_decls (tree blk)
2d4aecb3
AO
2914{
2915 tree t;
2916
2917 /* Mark decls. */
910ad8de 2918 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2919 if (DECL_RTL_SET_P (t))
5eb2a9f2 2920 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2921
2922 /* Now process sub-blocks. */
87caf699 2923 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2924 set_used_decls (t);
2d4aecb3
AO
2925}
2926
23b2ce53 2927/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2928 Recursively does the same for subexpressions. Uses
2929 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2930
2931rtx
502b8322 2932copy_rtx_if_shared (rtx orig)
23b2ce53 2933{
32b32b16
AP
2934 copy_rtx_if_shared_1 (&orig);
2935 return orig;
2936}
2937
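/* Illustrative sketch, not part of the original source: the usual
   unsharing protocol is to clear the used bits over the expressions
   of interest and then copy anything encountered twice.  The function
   name is hypothetical.  */

static rtx
example_unshare (rtx x)
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}
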
ff954f39
AP
2938/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2939 use. Recursively does the same for subexpressions. */
2940
32b32b16
AP
2941static void
2942copy_rtx_if_shared_1 (rtx *orig1)
2943{
2944 rtx x;
b3694847
SS
2945 int i;
2946 enum rtx_code code;
32b32b16 2947 rtx *last_ptr;
b3694847 2948 const char *format_ptr;
23b2ce53 2949 int copied = 0;
32b32b16
AP
2950 int length;
2951
2952 /* Repeat is used to turn tail-recursion into iteration. */
2953repeat:
2954 x = *orig1;
23b2ce53
RS
2955
2956 if (x == 0)
32b32b16 2957 return;
23b2ce53
RS
2958
2959 code = GET_CODE (x);
2960
2961 /* These types may be freely shared. */
2962
2963 switch (code)
2964 {
2965 case REG:
0ca5af51
AO
2966 case DEBUG_EXPR:
2967 case VALUE:
d8116890 2968 CASE_CONST_ANY:
23b2ce53 2969 case SYMBOL_REF:
2c07f13b 2970 case LABEL_REF:
23b2ce53
RS
2971 case CODE_LABEL:
2972 case PC:
2973 case CC0:
276e0224 2974 case RETURN:
26898771 2975 case SIMPLE_RETURN:
23b2ce53 2976 case SCRATCH:
0f41302f 2977 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 2978 return;
3e89ed8d 2979 case CLOBBER:
c5c5ba89
JH
2980 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2981 clobbers or clobbers of hard registers that originated as pseudos.
2982 This is needed to allow safe register renaming. */
d7ae3739
EB
2983 if (REG_P (XEXP (x, 0))
2984 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2985 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3e89ed8d
JH
2986 return;
2987 break;
23b2ce53 2988
b851ea09 2989 case CONST:
6fb5fa3c 2990 if (shared_const_p (x))
32b32b16 2991 return;
b851ea09
RK
2992 break;
2993
b5b8b0ac 2994 case DEBUG_INSN:
23b2ce53
RS
2995 case INSN:
2996 case JUMP_INSN:
2997 case CALL_INSN:
2998 case NOTE:
23b2ce53
RS
2999 case BARRIER:
3000 /* The chain of insns is not being copied. */
32b32b16 3001 return;
23b2ce53 3002
e9a25f70
JL
3003 default:
3004 break;
23b2ce53
RS
3005 }
3006
3007 /* This rtx may not be shared. If it has already been seen,
3008 replace it with a copy of itself. */
3009
2adc7f12 3010 if (RTX_FLAG (x, used))
23b2ce53 3011 {
aacd3885 3012 x = shallow_copy_rtx (x);
23b2ce53
RS
3013 copied = 1;
3014 }
2adc7f12 3015 RTX_FLAG (x, used) = 1;
23b2ce53
RS
3016
3017 /* Now scan the subexpressions recursively.
3018 We can store any replaced subexpressions directly into X
3019 since we know X is not shared! Any vectors in X
3020 must be copied if X was copied. */
3021
3022 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
3023 length = GET_RTX_LENGTH (code);
3024 last_ptr = NULL;
b8698a0f 3025
32b32b16 3026 for (i = 0; i < length; i++)
23b2ce53
RS
3027 {
3028 switch (*format_ptr++)
3029 {
3030 case 'e':
32b32b16
AP
3031 if (last_ptr)
3032 copy_rtx_if_shared_1 (last_ptr);
3033 last_ptr = &XEXP (x, i);
23b2ce53
RS
3034 break;
3035
3036 case 'E':
3037 if (XVEC (x, i) != NULL)
3038 {
b3694847 3039 int j;
f0722107 3040 int len = XVECLEN (x, i);
b8698a0f 3041
6614fd40
KH
3042 /* Copy the vector iff I copied the rtx and the length
3043 is nonzero. */
f0722107 3044 if (copied && len > 0)
8f985ec4 3045 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 3046
5d3cc252 3047 /* Call recursively on all inside the vector. */
f0722107 3048 for (j = 0; j < len; j++)
32b32b16
AP
3049 {
3050 if (last_ptr)
3051 copy_rtx_if_shared_1 (last_ptr);
3052 last_ptr = &XVECEXP (x, i, j);
3053 }
23b2ce53
RS
3054 }
3055 break;
3056 }
3057 }
32b32b16
AP
3058 *orig1 = x;
3059 if (last_ptr)
3060 {
3061 orig1 = last_ptr;
3062 goto repeat;
3063 }
3064 return;
23b2ce53
RS
3065}
3066
76369a82 3067/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 3068
76369a82
NF
3069static void
3070mark_used_flags (rtx x, int flag)
23b2ce53 3071{
b3694847
SS
3072 int i, j;
3073 enum rtx_code code;
3074 const char *format_ptr;
32b32b16 3075 int length;
23b2ce53 3076
32b32b16
AP
3077 /* Repeat is used to turn tail-recursion into iteration. */
3078repeat:
23b2ce53
RS
3079 if (x == 0)
3080 return;
3081
3082 code = GET_CODE (x);
3083
9faa82d8 3084 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
3085 for them. */
3086
3087 switch (code)
3088 {
3089 case REG:
0ca5af51
AO
3090 case DEBUG_EXPR:
3091 case VALUE:
d8116890 3092 CASE_CONST_ANY:
23b2ce53
RS
3093 case SYMBOL_REF:
3094 case CODE_LABEL:
3095 case PC:
3096 case CC0:
276e0224 3097 case RETURN:
26898771 3098 case SIMPLE_RETURN:
23b2ce53
RS
3099 return;
3100
b5b8b0ac 3101 case DEBUG_INSN:
23b2ce53
RS
3102 case INSN:
3103 case JUMP_INSN:
3104 case CALL_INSN:
3105 case NOTE:
3106 case LABEL_REF:
3107 case BARRIER:
3108 /* The chain of insns is not being copied. */
3109 return;
750c9258 3110
e9a25f70
JL
3111 default:
3112 break;
23b2ce53
RS
3113 }
3114
76369a82 3115 RTX_FLAG (x, used) = flag;
23b2ce53
RS
3116
3117 format_ptr = GET_RTX_FORMAT (code);
32b32b16 3118 length = GET_RTX_LENGTH (code);
b8698a0f 3119
32b32b16 3120 for (i = 0; i < length; i++)
23b2ce53
RS
3121 {
3122 switch (*format_ptr++)
3123 {
3124 case 'e':
32b32b16
AP
3125 if (i == length-1)
3126 {
3127 x = XEXP (x, i);
3128 goto repeat;
3129 }
76369a82 3130 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
3131 break;
3132
3133 case 'E':
3134 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 3135 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
3136 break;
3137 }
3138 }
3139}
2c07f13b 3140
76369a82 3141/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
3142 to look for shared sub-parts. */
3143
3144void
76369a82 3145reset_used_flags (rtx x)
2c07f13b 3146{
76369a82
NF
3147 mark_used_flags (x, 0);
3148}
2c07f13b 3149
76369a82
NF
3150/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3151 to look for shared sub-parts. */
2c07f13b 3152
76369a82
NF
3153void
3154set_used_flags (rtx x)
3155{
3156 mark_used_flags (x, 1);
2c07f13b 3157}
23b2ce53
RS
3158\f
3159/* Copy X if necessary so that it won't be altered by changes in OTHER.
3160 Return X or the rtx for the pseudo reg the value of X was copied into.
3161 OTHER must be valid as a SET_DEST. */
3162
3163rtx
502b8322 3164make_safe_from (rtx x, rtx other)
23b2ce53
RS
3165{
3166 while (1)
3167 switch (GET_CODE (other))
3168 {
3169 case SUBREG:
3170 other = SUBREG_REG (other);
3171 break;
3172 case STRICT_LOW_PART:
3173 case SIGN_EXTEND:
3174 case ZERO_EXTEND:
3175 other = XEXP (other, 0);
3176 break;
3177 default:
3178 goto done;
3179 }
3180 done:
3c0cb5de 3181 if ((MEM_P (other)
23b2ce53 3182 && ! CONSTANT_P (x)
f8cfc6aa 3183 && !REG_P (x)
23b2ce53 3184 && GET_CODE (x) != SUBREG)
f8cfc6aa 3185 || (REG_P (other)
23b2ce53
RS
3186 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3187 || reg_mentioned_p (other, x))))
3188 {
3189 rtx temp = gen_reg_rtx (GET_MODE (x));
3190 emit_move_insn (temp, x);
3191 return temp;
3192 }
3193 return x;
3194}
3195\f
3196/* Emission of insns (adding them to the doubly-linked list). */
3197
23b2ce53
RS
3198/* Return the last insn emitted, even if it is in a sequence now pushed. */
3199
db76cf1e 3200rtx_insn *
502b8322 3201get_last_insn_anywhere (void)
23b2ce53 3202{
614d5bd8
AM
3203 struct sequence_stack *seq;
3204 for (seq = get_current_sequence (); seq; seq = seq->next)
3205 if (seq->last != 0)
3206 return seq->last;
23b2ce53
RS
3207 return 0;
3208}
3209
2a496e8b
JDA
3210/* Return the first nonnote insn emitted in current sequence or current
3211 function. This routine looks inside SEQUENCEs. */
3212
e4685bc8 3213rtx_insn *
502b8322 3214get_first_nonnote_insn (void)
2a496e8b 3215{
dc01c3d1 3216 rtx_insn *insn = get_insns ();
91373fe8
JDA
3217
3218 if (insn)
3219 {
3220 if (NOTE_P (insn))
3221 for (insn = next_insn (insn);
3222 insn && NOTE_P (insn);
3223 insn = next_insn (insn))
3224 continue;
3225 else
3226 {
2ca202e7 3227 if (NONJUMP_INSN_P (insn)
91373fe8 3228 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3229 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
91373fe8
JDA
3230 }
3231 }
2a496e8b
JDA
3232
3233 return insn;
3234}
3235
3236/* Return the last nonnote insn emitted in current sequence or current
3237 function. This routine looks inside SEQUENCEs. */
3238
e4685bc8 3239rtx_insn *
502b8322 3240get_last_nonnote_insn (void)
2a496e8b 3241{
dc01c3d1 3242 rtx_insn *insn = get_last_insn ();
91373fe8
JDA
3243
3244 if (insn)
3245 {
3246 if (NOTE_P (insn))
3247 for (insn = previous_insn (insn);
3248 insn && NOTE_P (insn);
3249 insn = previous_insn (insn))
3250 continue;
3251 else
3252 {
dc01c3d1
DM
3253 if (NONJUMP_INSN_P (insn))
3254 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3255 insn = seq->insn (seq->len () - 1);
91373fe8
JDA
3256 }
3257 }
2a496e8b
JDA
3258
3259 return insn;
3260}
3261
b5b8b0ac
AO
3262/* Return the number of actual (non-debug) insns emitted in this
3263 function. */
3264
3265int
3266get_max_insn_count (void)
3267{
3268 int n = cur_insn_uid;
3269
3270 /* The table size must be stable across -g, to avoid codegen
3271 differences due to debug insns, and not be affected by
3272 -fmin-insn-uid, to avoid excessive table size and to simplify
3273 debugging of -fcompare-debug failures. */
3274 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3275 n -= cur_debug_insn_uid;
3276 else
3277 n -= MIN_NONDEBUG_INSN_UID;
3278
3279 return n;
3280}
3281
23b2ce53
RS
3282\f
3283/* Return the next insn. If it is a SEQUENCE, return the first insn
3284 of the sequence. */
3285
eb51c837 3286rtx_insn *
4ce524a1 3287next_insn (rtx_insn *insn)
23b2ce53 3288{
75547801
KG
3289 if (insn)
3290 {
3291 insn = NEXT_INSN (insn);
3292 if (insn && NONJUMP_INSN_P (insn)
3293 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3294 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3295 }
23b2ce53 3296
dc01c3d1 3297 return insn;
23b2ce53
RS
3298}
3299
3300/* Return the previous insn. If it is a SEQUENCE, return the last insn
3301 of the sequence. */
3302
eb51c837 3303rtx_insn *
4ce524a1 3304previous_insn (rtx_insn *insn)
23b2ce53 3305{
75547801
KG
3306 if (insn)
3307 {
3308 insn = PREV_INSN (insn);
dc01c3d1
DM
3309 if (insn && NONJUMP_INSN_P (insn))
3310 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3311 insn = seq->insn (seq->len () - 1);
75547801 3312 }
23b2ce53 3313
dc01c3d1 3314 return insn;
23b2ce53
RS
3315}
3316
3317/* Return the next insn after INSN that is not a NOTE. This routine does not
3318 look inside SEQUENCEs. */
3319
eb51c837 3320rtx_insn *
c9b0a227 3321next_nonnote_insn (rtx_insn *insn)
23b2ce53 3322{
75547801
KG
3323 while (insn)
3324 {
3325 insn = NEXT_INSN (insn);
3326 if (insn == 0 || !NOTE_P (insn))
3327 break;
3328 }
23b2ce53 3329
dc01c3d1 3330 return insn;
23b2ce53
RS
3331}
3332
1e211590
DD
3333/* Return the next insn after INSN that is not a NOTE, but stop the
3334 search before we enter another basic block. This routine does not
3335 look inside SEQUENCEs. */
3336
eb51c837 3337rtx_insn *
e4685bc8 3338next_nonnote_insn_bb (rtx_insn *insn)
1e211590
DD
3339{
3340 while (insn)
3341 {
3342 insn = NEXT_INSN (insn);
3343 if (insn == 0 || !NOTE_P (insn))
3344 break;
3345 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3346 return NULL;
1e211590
DD
3347 }
3348
dc01c3d1 3349 return insn;
1e211590
DD
3350}
3351
23b2ce53
RS
3352/* Return the previous insn before INSN that is not a NOTE. This routine does
3353 not look inside SEQUENCEs. */
3354
eb51c837 3355rtx_insn *
c9b0a227 3356prev_nonnote_insn (rtx_insn *insn)
23b2ce53 3357{
75547801
KG
3358 while (insn)
3359 {
3360 insn = PREV_INSN (insn);
3361 if (insn == 0 || !NOTE_P (insn))
3362 break;
3363 }
23b2ce53 3364
dc01c3d1 3365 return insn;
23b2ce53
RS
3366}
3367
896aa4ea
DD
3368/* Return the previous insn before INSN that is not a NOTE, but stop
3369 the search before we enter another basic block. This routine does
3370 not look inside SEQUENCEs. */
3371
eb51c837 3372rtx_insn *
9815687d 3373prev_nonnote_insn_bb (rtx_insn *insn)
896aa4ea 3374{
dc01c3d1 3375
896aa4ea
DD
3376 while (insn)
3377 {
3378 insn = PREV_INSN (insn);
3379 if (insn == 0 || !NOTE_P (insn))
3380 break;
3381 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3382 return NULL;
896aa4ea
DD
3383 }
3384
dc01c3d1 3385 return insn;
896aa4ea
DD
3386}
3387
b5b8b0ac
AO
3388/* Return the next insn after INSN that is not a DEBUG_INSN. This
3389 routine does not look inside SEQUENCEs. */
3390
eb51c837 3391rtx_insn *
30d2ef86 3392next_nondebug_insn (rtx_insn *insn)
b5b8b0ac
AO
3393{
3394 while (insn)
3395 {
3396 insn = NEXT_INSN (insn);
3397 if (insn == 0 || !DEBUG_INSN_P (insn))
3398 break;
3399 }
3400
dc01c3d1 3401 return insn;
b5b8b0ac
AO
3402}
3403
3404/* Return the previous insn before INSN that is not a DEBUG_INSN.
3405 This routine does not look inside SEQUENCEs. */
3406
eb51c837 3407rtx_insn *
30d2ef86 3408prev_nondebug_insn (rtx_insn *insn)
b5b8b0ac
AO
3409{
3410 while (insn)
3411 {
3412 insn = PREV_INSN (insn);
3413 if (insn == 0 || !DEBUG_INSN_P (insn))
3414 break;
3415 }
3416
dc01c3d1 3417 return insn;
b5b8b0ac
AO
3418}
3419
f0fc0803
JJ
3420/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3421 This routine does not look inside SEQUENCEs. */
3422
eb51c837 3423rtx_insn *
1f00691e 3424next_nonnote_nondebug_insn (rtx_insn *insn)
f0fc0803
JJ
3425{
3426 while (insn)
3427 {
3428 insn = NEXT_INSN (insn);
3429 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3430 break;
3431 }
3432
dc01c3d1 3433 return insn;
f0fc0803
JJ
3434}
3435
3436/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3437 This routine does not look inside SEQUENCEs. */
3438
eb51c837 3439rtx_insn *
1f00691e 3440prev_nonnote_nondebug_insn (rtx_insn *insn)
f0fc0803
JJ
3441{
3442 while (insn)
3443 {
3444 insn = PREV_INSN (insn);
3445 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3446 break;
3447 }
3448
dc01c3d1 3449 return insn;
f0fc0803
JJ
3450}
3451
23b2ce53
RS
3452/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3453 or 0, if there is none. This routine does not look inside
0f41302f 3454 SEQUENCEs. */
23b2ce53 3455
eb51c837 3456rtx_insn *
dc01c3d1 3457next_real_insn (rtx uncast_insn)
23b2ce53 3458{
dc01c3d1
DM
3459 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3460
75547801
KG
3461 while (insn)
3462 {
3463 insn = NEXT_INSN (insn);
3464 if (insn == 0 || INSN_P (insn))
3465 break;
3466 }
23b2ce53 3467
dc01c3d1 3468 return insn;
23b2ce53
RS
3469}
3470
3471/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3472 or 0, if there is none. This routine does not look inside
3473 SEQUENCEs. */
3474
eb51c837 3475rtx_insn *
d8fd56b2 3476prev_real_insn (rtx_insn *insn)
23b2ce53 3477{
75547801
KG
3478 while (insn)
3479 {
3480 insn = PREV_INSN (insn);
3481 if (insn == 0 || INSN_P (insn))
3482 break;
3483 }
23b2ce53 3484
dc01c3d1 3485 return insn;
23b2ce53
RS
3486}
3487
ee960939
OH
3488/* Return the last CALL_INSN in the current list, or 0 if there is none.
3489 This routine does not look inside SEQUENCEs. */
3490
049cfc4a 3491rtx_call_insn *
502b8322 3492last_call_insn (void)
ee960939 3493{
049cfc4a 3494 rtx_insn *insn;
ee960939
OH
3495
3496 for (insn = get_last_insn ();
4b4bf941 3497 insn && !CALL_P (insn);
ee960939
OH
3498 insn = PREV_INSN (insn))
3499 ;
3500
049cfc4a 3501 return safe_as_a <rtx_call_insn *> (insn);
ee960939
OH
3502}
3503
23b2ce53 3504/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3505 does not look inside SEQUENCEs. After reload this also skips over
3506 standalone USE and CLOBBER insns. */
23b2ce53 3507
69732dcb 3508int
7c9796ed 3509active_insn_p (const rtx_insn *insn)
69732dcb 3510{
4b4bf941 3511 return (CALL_P (insn) || JUMP_P (insn)
39718607 3512 || JUMP_TABLE_DATA_P (insn) /* FIXME */
4b4bf941 3513 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3514 && (! reload_completed
3515 || (GET_CODE (PATTERN (insn)) != USE
3516 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3517}
3518
eb51c837 3519rtx_insn *
7c9796ed 3520next_active_insn (rtx_insn *insn)
23b2ce53 3521{
75547801
KG
3522 while (insn)
3523 {
3524 insn = NEXT_INSN (insn);
3525 if (insn == 0 || active_insn_p (insn))
3526 break;
3527 }
23b2ce53 3528
dc01c3d1 3529 return insn;
23b2ce53
RS
3530}
3531
3532/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3533 does not look inside SEQUENCEs. After reload this also skips over
3534 standalone USE and CLOBBER insns. */
23b2ce53 3535
eb51c837 3536rtx_insn *
7c9796ed 3537prev_active_insn (rtx_insn *insn)
23b2ce53 3538{
75547801
KG
3539 while (insn)
3540 {
3541 insn = PREV_INSN (insn);
3542 if (insn == 0 || active_insn_p (insn))
3543 break;
3544 }
23b2ce53 3545
dc01c3d1 3546 return insn;
23b2ce53 3547}
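
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical): visiting every active insn in the current function
   with the helpers above.  */

static void
for_each_active_insn (void (*fn) (rtx_insn *))
{
  rtx_insn *insn = get_insns ();
  /* The first insn may itself be inactive; skip ahead if so.  */
  if (insn && !active_insn_p (insn))
    insn = next_active_insn (insn);
  while (insn)
    {
      fn (insn);
      insn = next_active_insn (insn);
    }
}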
23b2ce53 3548\f
23b2ce53
RS
3549/* Return the next insn that uses CC0 after INSN, which is assumed to
3550 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3551 applied to the result of this function should yield INSN).
3552
3553 Normally, this is simply the next insn. However, if a REG_CC_USER note
3554 is present, it contains the insn that uses CC0.
3555
3556 Return 0 if we can't find the insn. */
3557
75b46023 3558rtx_insn *
475edec0 3559next_cc0_user (rtx_insn *insn)
23b2ce53 3560{
906c4e36 3561 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3562
3563 if (note)
75b46023 3564 return safe_as_a <rtx_insn *> (XEXP (note, 0));
23b2ce53
RS
3565
3566 insn = next_nonnote_insn (insn);
4b4bf941 3567 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3568 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
23b2ce53 3569
2c3c49de 3570 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
dc01c3d1 3571 return insn;
23b2ce53
RS
3572
3573 return 0;
3574}
3575
3576/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3577 note, it is the previous insn. */
3578
75b46023 3579rtx_insn *
5c8db5b4 3580prev_cc0_setter (rtx_insn *insn)
23b2ce53 3581{
906c4e36 3582 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3583
3584 if (note)
75b46023 3585 return safe_as_a <rtx_insn *> (XEXP (note, 0));
23b2ce53
RS
3586
3587 insn = prev_nonnote_insn (insn);
5b0264cb 3588 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53 3589
dc01c3d1 3590 return insn;
23b2ce53 3591}
e5bef2e4 3592
594f8779
RZ
3593/* Return true if X contains a RTX_AUTOINC class rtx whose operand is REG. */
3594
3595static int
9021b8ec 3596find_auto_inc (const_rtx x, const_rtx reg)
594f8779 3597{
9021b8ec
RS
3598 subrtx_iterator::array_type array;
3599 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
594f8779 3600 {
9021b8ec
RS
3601 const_rtx x = *iter;
3602 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3603 && rtx_equal_p (reg, XEXP (x, 0)))
3604 return true;
594f8779 3605 }
9021b8ec 3606 return false;
594f8779 3607}
594f8779 3608
e5bef2e4
HB
3609/* Increment LABEL_NUSES for all labels referenced in X. */
3610
3611static void
502b8322 3612mark_label_nuses (rtx x)
e5bef2e4 3613{
b3694847
SS
3614 enum rtx_code code;
3615 int i, j;
3616 const char *fmt;
e5bef2e4
HB
3617
3618 code = GET_CODE (x);
04a121a7
TS
3619 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3620 LABEL_NUSES (label_ref_label (x))++;
e5bef2e4
HB
3621
3622 fmt = GET_RTX_FORMAT (code);
3623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3624 {
3625 if (fmt[i] == 'e')
0fb7aeda 3626 mark_label_nuses (XEXP (x, i));
e5bef2e4 3627 else if (fmt[i] == 'E')
0fb7aeda 3628 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3629 mark_label_nuses (XVECEXP (x, i, j));
3630 }
3631}
3632
23b2ce53
RS
3633\f
3634/* Try splitting insns that can be split for better scheduling.
3635 PAT is the pattern which might split.
3636 TRIAL is the insn providing PAT.
cc2902df 3637 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3638
3639 If this routine succeeds in splitting, it returns the first or last
11147ebe 3640 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3641 returns TRIAL. If the insn to be returned can be split, it will be. */
3642
53f04688 3643rtx_insn *
bb5c4956 3644try_split (rtx pat, rtx_insn *trial, int last)
23b2ce53 3645{
d4eff95b 3646 rtx_insn *before, *after;
dc01c3d1
DM
3647 rtx note;
3648 rtx_insn *seq, *tem;
6b24c259 3649 int probability;
dc01c3d1 3650 rtx_insn *insn_last, *insn;
599aedd9 3651 int njumps = 0;
e67d1102 3652 rtx_insn *call_insn = NULL;
6b24c259 3653
cd9c1ca8
RH
3654 /* We're not good at redistributing frame information. */
3655 if (RTX_FRAME_RELATED_P (trial))
dc01c3d1 3656 return trial;
cd9c1ca8 3657
6b24c259
JH
3658 if (any_condjump_p (trial)
3659 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
e5af9ddd 3660 split_branch_probability = XINT (note, 0);
6b24c259
JH
3661 probability = split_branch_probability;
3662
bb5c4956 3663 seq = split_insns (pat, trial);
6b24c259
JH
3664
3665 split_branch_probability = -1;
23b2ce53 3666
599aedd9 3667 if (!seq)
dc01c3d1 3668 return trial;
599aedd9
RH
3669
3670 /* Avoid infinite loop if any insn of the result matches
3671 the original pattern. */
3672 insn_last = seq;
3673 while (1)
23b2ce53 3674 {
599aedd9
RH
3675 if (INSN_P (insn_last)
3676 && rtx_equal_p (PATTERN (insn_last), pat))
dc01c3d1 3677 return trial;
599aedd9
RH
3678 if (!NEXT_INSN (insn_last))
3679 break;
3680 insn_last = NEXT_INSN (insn_last);
3681 }
750c9258 3682
6fb5fa3c
DB
3683 /* We will be adding the new sequence to the function. The splitters
3684 may have introduced invalid RTL sharing, so unshare the sequence now. */
3685 unshare_all_rtl_in_chain (seq);
3686
339ba33b 3687 /* Mark labels and copy flags. */
599aedd9
RH
3688 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3689 {
4b4bf941 3690 if (JUMP_P (insn))
599aedd9 3691 {
339ba33b
RS
3692 if (JUMP_P (trial))
3693 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
599aedd9
RH
3694 mark_jump_label (PATTERN (insn), insn, 0);
3695 njumps++;
3696 if (probability != -1
3697 && any_condjump_p (insn)
3698 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3699 {
599aedd9
RH
3700 /* We can preserve the REG_BR_PROB notes only if exactly
3701 one jump is created; otherwise the machine description
3702 is responsible for this step, using the
3703 split_branch_probability variable. */
5b0264cb 3704 gcc_assert (njumps == 1);
e5af9ddd 3705 add_int_reg_note (insn, REG_BR_PROB, probability);
2f937369 3706 }
599aedd9
RH
3707 }
3708 }
3709
3710 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3711 in SEQ and copy any additional information across. */
4b4bf941 3712 if (CALL_P (trial))
599aedd9
RH
3713 {
3714 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3715 if (CALL_P (insn))
599aedd9 3716 {
dc01c3d1
DM
3717 rtx_insn *next;
3718 rtx *p;
65712d5c 3719
4f660b15
RO
3720 gcc_assert (call_insn == NULL_RTX);
3721 call_insn = insn;
3722
65712d5c
RS
3723 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3724 target may have explicitly specified. */
3725 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3726 while (*p)
3727 p = &XEXP (*p, 1);
3728 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3729
3730 /* If the old call was a sibling call, the new one must
3731 be too. */
599aedd9 3732 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3733
3734 /* If the new call is the last instruction in the sequence,
3735 it will effectively replace the old call in-situ. Otherwise
3736 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3737 so that it comes immediately after the new call. */
3738 if (NEXT_INSN (insn))
65f3dedb
RS
3739 for (next = NEXT_INSN (trial);
3740 next && NOTE_P (next);
3741 next = NEXT_INSN (next))
3742 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3743 {
3744 remove_insn (next);
3745 add_insn_after (next, insn, NULL);
65f3dedb 3746 break;
65712d5c 3747 }
599aedd9
RH
3748 }
3749 }
4b5e8abe 3750
599aedd9
RH
3751 /* Copy notes, particularly those related to the CFG. */
3752 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3753 {
3754 switch (REG_NOTE_KIND (note))
3755 {
3756 case REG_EH_REGION:
1d65f45c 3757 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3758 break;
216183ce 3759
599aedd9
RH
3760 case REG_NORETURN:
3761 case REG_SETJMP:
0a35513e 3762 case REG_TM:
594f8779 3763 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3764 {
4b4bf941 3765 if (CALL_P (insn))
65c5f2a6 3766 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3767 }
599aedd9 3768 break;
d6e95df8 3769
599aedd9 3770 case REG_NON_LOCAL_GOTO:
594f8779 3771 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3772 {
4b4bf941 3773 if (JUMP_P (insn))
65c5f2a6 3774 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3775 }
599aedd9 3776 break;
e5bef2e4 3777
594f8779 3778 case REG_INC:
760edf20
TS
3779 if (!AUTO_INC_DEC)
3780 break;
3781
594f8779
RZ
3782 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3783 {
3784 rtx reg = XEXP (note, 0);
3785 if (!FIND_REG_INC_NOTE (insn, reg)
9021b8ec 3786 && find_auto_inc (PATTERN (insn), reg))
65c5f2a6 3787 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3788 }
3789 break;
594f8779 3790
9a08d230 3791 case REG_ARGS_SIZE:
e5b51ca0 3792 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
9a08d230
RH
3793 break;
3794
4f660b15
RO
3795 case REG_CALL_DECL:
3796 gcc_assert (call_insn != NULL_RTX);
3797 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3798 break;
3799
599aedd9
RH
3800 default:
3801 break;
23b2ce53 3802 }
599aedd9
RH
3803 }
3804
3805 /* If there are LABELS inside the split insns, increment the
3806 usage count so we don't delete the label. */
cf7c4aa6 3807 if (INSN_P (trial))
599aedd9
RH
3808 {
3809 insn = insn_last;
3810 while (insn != NULL_RTX)
23b2ce53 3811 {
cf7c4aa6 3812 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3813 if (NONJUMP_INSN_P (insn))
599aedd9 3814 mark_label_nuses (PATTERN (insn));
23b2ce53 3815
599aedd9
RH
3816 insn = PREV_INSN (insn);
3817 }
23b2ce53
RS
3818 }
3819
d4eff95b
JC
3820 before = PREV_INSN (trial);
3821 after = NEXT_INSN (trial);
3822
5368224f 3823 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
599aedd9
RH
3824
3825 delete_insn (trial);
599aedd9
RH
3826
3827 /* Recursively call try_split for each new insn created; by the
3828 time control returns here that insn will be fully split, so
3829 set LAST and continue from the insn after the one returned.
3830 We can't use next_active_insn here since AFTER may be a note.
3831 Ignore deleted insns, which can occur if not optimizing. */
3832 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
4654c0cf 3833 if (! tem->deleted () && INSN_P (tem))
599aedd9
RH
3834 tem = try_split (PATTERN (tem), tem, 1);
3835
3836 /* Return either the first or the last insn, depending on which was
3837 requested. */
3838 return last
5936d944 3839 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3840 : NEXT_INSN (before);
23b2ce53
RS
3841}
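
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical), in the style of split_all_insns: running try_split
   over a whole function.  Passing LAST = 1 asks for the last insn of
   any replacement sequence.  */

static void
split_every_insn (void)
{
  rtx_insn *next;
  for (rtx_insn *insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	/* try_split returns TRIAL itself when no splitter matches.  */
	try_split (PATTERN (insn), insn, 1);
    }
}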
3842\f
3843/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3844 Store PATTERN in the pattern slots. */
23b2ce53 3845
167b9fae 3846rtx_insn *
502b8322 3847make_insn_raw (rtx pattern)
23b2ce53 3848{
167b9fae 3849 rtx_insn *insn;
23b2ce53 3850
167b9fae 3851 insn = as_a <rtx_insn *> (rtx_alloc (INSN));
23b2ce53 3852
43127294 3853 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3854 PATTERN (insn) = pattern;
3855 INSN_CODE (insn) = -1;
1632afca 3856 REG_NOTES (insn) = NULL;
5368224f 3857 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3858 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3859
47984720
NC
3860#ifdef ENABLE_RTL_CHECKING
3861 if (insn
2c3c49de 3862 && INSN_P (insn)
47984720
NC
3863 && (returnjump_p (insn)
3864 || (GET_CODE (insn) == SET
3865 && SET_DEST (insn) == pc_rtx)))
3866 {
d4ee4d25 3867 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3868 debug_rtx (insn);
3869 }
3870#endif
750c9258 3871
23b2ce53
RS
3872 return insn;
3873}
3874
b5b8b0ac
AO
3875/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3876
167b9fae 3877static rtx_insn *
b5b8b0ac
AO
3878make_debug_insn_raw (rtx pattern)
3879{
167b9fae 3880 rtx_debug_insn *insn;
b5b8b0ac 3881
167b9fae 3882 insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
b5b8b0ac
AO
3883 INSN_UID (insn) = cur_debug_insn_uid++;
3884 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3885 INSN_UID (insn) = cur_insn_uid++;
3886
3887 PATTERN (insn) = pattern;
3888 INSN_CODE (insn) = -1;
3889 REG_NOTES (insn) = NULL;
5368224f 3890 INSN_LOCATION (insn) = curr_insn_location ();
b5b8b0ac
AO
3891 BLOCK_FOR_INSN (insn) = NULL;
3892
3893 return insn;
3894}
3895
2f937369 3896/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3897
167b9fae 3898static rtx_insn *
502b8322 3899make_jump_insn_raw (rtx pattern)
23b2ce53 3900{
167b9fae 3901 rtx_jump_insn *insn;
23b2ce53 3902
167b9fae 3903 insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
1632afca 3904 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3905
3906 PATTERN (insn) = pattern;
3907 INSN_CODE (insn) = -1;
1632afca
RS
3908 REG_NOTES (insn) = NULL;
3909 JUMP_LABEL (insn) = NULL;
5368224f 3910 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3911 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3912
3913 return insn;
3914}
aff507f4 3915
2f937369 3916/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4 3917
167b9fae 3918static rtx_insn *
502b8322 3919make_call_insn_raw (rtx pattern)
aff507f4 3920{
167b9fae 3921 rtx_call_insn *insn;
aff507f4 3922
167b9fae 3923 insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
aff507f4
RK
3924 INSN_UID (insn) = cur_insn_uid++;
3925
3926 PATTERN (insn) = pattern;
3927 INSN_CODE (insn) = -1;
aff507f4
RK
3928 REG_NOTES (insn) = NULL;
3929 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
5368224f 3930 INSN_LOCATION (insn) = curr_insn_location ();
ba4f7968 3931 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3932
3933 return insn;
3934}
96fba521
SB
3935
3936/* Like `make_insn_raw' but make a NOTE instead of an insn. */
3937
66e8df53 3938static rtx_note *
96fba521
SB
3939make_note_raw (enum insn_note subtype)
3940{
3941 /* Some notes are never created this way at all. These notes are
3942 only created by patching out insns. */
3943 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3944 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3945
66e8df53 3946 rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
96fba521
SB
3947 INSN_UID (note) = cur_insn_uid++;
3948 NOTE_KIND (note) = subtype;
3949 BLOCK_FOR_INSN (note) = NULL;
3950 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3951 return note;
3952}
23b2ce53 3953\f
96fba521
SB
3954/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3955 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3956 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3957
3958static inline void
9152e0aa 3959link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
96fba521 3960{
0f82e5c9
DM
3961 SET_PREV_INSN (insn) = prev;
3962 SET_NEXT_INSN (insn) = next;
96fba521
SB
3963 if (prev != NULL)
3964 {
0f82e5c9 3965 SET_NEXT_INSN (prev) = insn;
96fba521
SB
3966 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3967 {
e6eda746
DM
3968 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3969 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
96fba521
SB
3970 }
3971 }
3972 if (next != NULL)
3973 {
0f82e5c9 3974 SET_PREV_INSN (next) = insn;
96fba521 3975 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
e6eda746
DM
3976 {
3977 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3978 SET_PREV_INSN (sequence->insn (0)) = insn;
3979 }
96fba521 3980 }
3ccb989e
SB
3981
3982 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3983 {
e6eda746
DM
3984 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3985 SET_PREV_INSN (sequence->insn (0)) = prev;
3986 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3ccb989e 3987 }
96fba521
SB
3988}
3989
23b2ce53
RS
3990/* Add INSN to the end of the doubly-linked list.
3991 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3992
3993void
9152e0aa 3994add_insn (rtx_insn *insn)
23b2ce53 3995{
9152e0aa 3996 rtx_insn *prev = get_last_insn ();
96fba521 3997 link_insn_into_chain (insn, prev, NULL);
5936d944
JH
3998 if (NULL == get_insns ())
3999 set_first_insn (insn);
5936d944 4000 set_last_insn (insn);
23b2ce53
RS
4001}
4002
96fba521 4003/* Add INSN into the doubly-linked list after insn AFTER. */
23b2ce53 4004
96fba521 4005static void
9152e0aa 4006add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
23b2ce53 4007{
9152e0aa 4008 rtx_insn *next = NEXT_INSN (after);
23b2ce53 4009
4654c0cf 4010 gcc_assert (!optimize || !after->deleted ());
ba213285 4011
96fba521 4012 link_insn_into_chain (insn, after, next);
23b2ce53 4013
96fba521 4014 if (next == NULL)
23b2ce53 4015 {
614d5bd8
AM
4016 struct sequence_stack *seq;
4017
4018 for (seq = get_current_sequence (); seq; seq = seq->next)
4019 if (after == seq->last)
4020 {
4021 seq->last = insn;
4022 break;
4023 }
23b2ce53 4024 }
96fba521
SB
4025}
4026
4027/* Add INSN into the doubly-linked list before insn BEFORE. */
4028
4029static void
9152e0aa 4030add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
96fba521 4031{
9152e0aa 4032 rtx_insn *prev = PREV_INSN (before);
96fba521 4033
4654c0cf 4034 gcc_assert (!optimize || !before->deleted ());
96fba521
SB
4035
4036 link_insn_into_chain (insn, prev, before);
4037
4038 if (prev == NULL)
23b2ce53 4039 {
614d5bd8 4040 struct sequence_stack *seq;
a0ae8e8d 4041
614d5bd8
AM
4042 for (seq = get_current_sequence (); seq; seq = seq->next)
4043 if (before == seq->first)
4044 {
4045 seq->first = insn;
4046 break;
4047 }
4048
4049 gcc_assert (seq);
23b2ce53 4050 }
96fba521
SB
4051}
4052
4053/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4054 If BB is NULL, an attempt is made to infer the bb from AFTER.
4055
4056 This and the next function should be the only functions called
4057 to insert an insn once delay slots have been filled since only
4058 they know how to update a SEQUENCE. */
23b2ce53 4059
96fba521 4060void
9152e0aa 4061add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
96fba521 4062{
1130d5e3 4063 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
9152e0aa 4064 rtx_insn *after = as_a <rtx_insn *> (uncast_after);
96fba521 4065 add_insn_after_nobb (insn, after);
4b4bf941
JQ
4066 if (!BARRIER_P (after)
4067 && !BARRIER_P (insn)
3c030e88
JH
4068 && (bb = BLOCK_FOR_INSN (after)))
4069 {
4070 set_block_for_insn (insn, bb);
38c1593d 4071 if (INSN_P (insn))
6fb5fa3c 4072 df_insn_rescan (insn);
3c030e88 4073 /* Should not happen, as the first insn in the BB is always
a1f300c0 4074 either a NOTE or a LABEL. */
a813c111 4075 if (BB_END (bb) == after
3c030e88 4076 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 4077 && !BARRIER_P (insn)
a38e7aa5 4078 && !NOTE_INSN_BASIC_BLOCK_P (insn))
1130d5e3 4079 BB_END (bb) = insn;
3c030e88 4080 }
23b2ce53
RS
4081}
4082
96fba521
SB
4083/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4084 If BB is NULL, an attempt is made to infer the bb from before.
4085
4086 This and the previous function should be the only functions called
4087 to insert an insn once delay slots have been filled since only
4088 they know how to update a SEQUENCE. */
a0ae8e8d
RK
4089
4090void
9152e0aa 4091add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
a0ae8e8d 4092{
9152e0aa
DM
4093 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4094 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
96fba521 4095 add_insn_before_nobb (insn, before);
a0ae8e8d 4096
b8698a0f 4097 if (!bb
6fb5fa3c
DB
4098 && !BARRIER_P (before)
4099 && !BARRIER_P (insn))
4100 bb = BLOCK_FOR_INSN (before);
4101
4102 if (bb)
3c030e88
JH
4103 {
4104 set_block_for_insn (insn, bb);
38c1593d 4105 if (INSN_P (insn))
6fb5fa3c 4106 df_insn_rescan (insn);
5b0264cb 4107 /* Should not happen, as the first insn in the BB is always
43e05e45 4108 either a NOTE or a LABEL. */
5b0264cb
NS
4109 gcc_assert (BB_HEAD (bb) != insn
4110 /* Avoid clobbering of structure when creating new BB. */
4111 || BARRIER_P (insn)
a38e7aa5 4112 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88 4113 }
a0ae8e8d
RK
4114}
4115
6fb5fa3c
DB
4116/* Replace INSN with a deleted instruction note. */
4117
0ce2b299
EB
4118void
4119set_insn_deleted (rtx insn)
6fb5fa3c 4120{
39718607 4121 if (INSN_P (insn))
b2908ba6 4122 df_insn_delete (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
4123 PUT_CODE (insn, NOTE);
4124 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4125}
4126
4127
1f397f45
SB
4128/* Unlink INSN from the insn chain.
4129
4130 This function knows how to handle sequences.
4131
4132 This function does not invalidate data flow information associated with
4133 INSN (i.e. does not call df_insn_delete). That makes this function
4134 usable for only disconnecting an insn from the chain, and re-emit it
4135 elsewhere later.
4136
4137 To later insert INSN elsewhere in the insn chain via add_insn and
4138 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4139 the caller. Nullifying them here breaks many insn chain walks.
4140
4141 To really delete an insn and related DF information, use delete_insn. */
4142
89e99eea 4143void
dc01c3d1 4144remove_insn (rtx uncast_insn)
89e99eea 4145{
dc01c3d1 4146 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
1130d5e3
DM
4147 rtx_insn *next = NEXT_INSN (insn);
4148 rtx_insn *prev = PREV_INSN (insn);
53c17031
JH
4149 basic_block bb;
4150
89e99eea
DB
4151 if (prev)
4152 {
0f82e5c9 4153 SET_NEXT_INSN (prev) = next;
4b4bf941 4154 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea 4155 {
e6eda746
DM
4156 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4157 SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
89e99eea
DB
4158 }
4159 }
89e99eea
DB
4160 else
4161 {
614d5bd8
AM
4162 struct sequence_stack *seq;
4163
4164 for (seq = get_current_sequence (); seq; seq = seq->next)
4165 if (insn == seq->first)
89e99eea 4166 {
614d5bd8 4167 seq->first = next;
89e99eea
DB
4168 break;
4169 }
4170
614d5bd8 4171 gcc_assert (seq);
89e99eea
DB
4172 }
4173
4174 if (next)
4175 {
0f82e5c9 4176 SET_PREV_INSN (next) = prev;
4b4bf941 4177 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
e6eda746
DM
4178 {
4179 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4180 SET_PREV_INSN (sequence->insn (0)) = prev;
4181 }
89e99eea 4182 }
89e99eea
DB
4183 else
4184 {
614d5bd8
AM
4185 struct sequence_stack *seq;
4186
4187 for (seq = get_current_sequence (); seq; seq = seq->next)
4188 if (insn == seq->last)
89e99eea 4189 {
614d5bd8 4190 seq->last = prev;
89e99eea
DB
4191 break;
4192 }
4193
614d5bd8 4194 gcc_assert (seq);
89e99eea 4195 }
80eb8028 4196
80eb8028 4197 /* Fix up basic block boundaries, if necessary. */
4b4bf941 4198 if (!BARRIER_P (insn)
53c17031
JH
4199 && (bb = BLOCK_FOR_INSN (insn)))
4200 {
a813c111 4201 if (BB_HEAD (bb) == insn)
53c17031 4202 {
3bf1e984
RK
4203 /* Never ever delete the basic block note without deleting whole
4204 basic block. */
5b0264cb 4205 gcc_assert (!NOTE_P (insn));
1130d5e3 4206 BB_HEAD (bb) = next;
53c17031 4207 }
a813c111 4208 if (BB_END (bb) == insn)
1130d5e3 4209 BB_END (bb) = prev;
53c17031 4210 }
89e99eea
DB
4211}
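
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical): detaching an insn and re-emitting it elsewhere,
   following the contract stated above remove_insn -- PREV_INSN and
   NEXT_INSN must be nullified by the caller before re-adding.  */

static void
move_insn_after (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}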
4212
ee960939
OH
4213/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4214
4215void
502b8322 4216add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 4217{
5b0264cb 4218 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
4219
4220 /* Put the register usage information on the CALL. If there is already
4221 some usage information, put ours at the end. */
4222 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4223 {
4224 rtx link;
4225
4226 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4227 link = XEXP (link, 1))
4228 ;
4229
4230 XEXP (link, 1) = call_fusage;
4231 }
4232 else
4233 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4234}
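
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical), in the style of the expanders in calls.c: building a
   usage list with use_reg (from expr.h) and attaching it to a call.  */

static void
record_call_register_use (rtx_insn *call_insn, rtx reg)
{
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, reg);	/* Chain a (use REG) expression.  */
  add_function_usage_to (call_insn, call_fusage);
}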
4235
23b2ce53
RS
4236/* Delete all insns made since FROM.
4237 FROM becomes the new last instruction. */
4238
4239void
fee3e72c 4240delete_insns_since (rtx_insn *from)
23b2ce53
RS
4241{
4242 if (from == 0)
5936d944 4243 set_first_insn (0);
23b2ce53 4244 else
0f82e5c9 4245 SET_NEXT_INSN (from) = 0;
5936d944 4246 set_last_insn (from);
23b2ce53
RS
4247}
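
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical), mirroring the usual pattern in optabs.c: remember the
   last insn, emit tentatively, and back the new code out again if it
   does not match any insn pattern.  */

static bool
emit_set_if_recognized (rtx dest, rtx src)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn = emit_insn (gen_rtx_SET (dest, src));
  if (recog_memoized (insn) < 0)
    {
      delete_insns_since (last);
      return false;
    }
  return true;
}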
4248
5dab5552
MS
4249/* This function is deprecated; please use sequences instead.
4250
4251 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4252 The insns to be moved are those between FROM and TO.
4253 They are moved to a new position after the insn AFTER.
4254 AFTER must not be FROM or TO or any insn in between.
4255
4256 This function does not know about SEQUENCEs and hence should not be
4257 called after delay-slot filling has been done. */
4258
4259void
fee3e72c 4260reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
23b2ce53 4261{
b2b29377
MM
4262 if (flag_checking)
4263 {
4264 for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4265 gcc_assert (after != x);
4266 gcc_assert (after != to);
4267 }
4f8344eb 4268
23b2ce53
RS
4269 /* Splice this bunch out of where it is now. */
4270 if (PREV_INSN (from))
0f82e5c9 4271 SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
23b2ce53 4272 if (NEXT_INSN (to))
0f82e5c9 4273 SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4274 if (get_last_insn () == to)
4275 set_last_insn (PREV_INSN (from));
4276 if (get_insns () == from)
4277 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4278
4279 /* Make the new neighbors point to it and it to them. */
4280 if (NEXT_INSN (after))
0f82e5c9 4281 SET_PREV_INSN (NEXT_INSN (after)) = to;
23b2ce53 4282
0f82e5c9
DM
4283 SET_NEXT_INSN (to) = NEXT_INSN (after);
4284 SET_PREV_INSN (from) = after;
4285 SET_NEXT_INSN (after) = from;
c3284718 4286 if (after == get_last_insn ())
5936d944 4287 set_last_insn (to);
23b2ce53
RS
4288}
4289
3c030e88
JH
4290/* Same as function above, but take care to update BB boundaries. */
4291void
ac9d2d2c 4292reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
3c030e88 4293{
ac9d2d2c 4294 rtx_insn *prev = PREV_INSN (from);
3c030e88
JH
4295 basic_block bb, bb2;
4296
4297 reorder_insns_nobb (from, to, after);
4298
4b4bf941 4299 if (!BARRIER_P (after)
3c030e88
JH
4300 && (bb = BLOCK_FOR_INSN (after)))
4301 {
b2908ba6 4302 rtx_insn *x;
6fb5fa3c 4303 df_set_bb_dirty (bb);
68252e27 4304
4b4bf941 4305 if (!BARRIER_P (from)
3c030e88
JH
4306 && (bb2 = BLOCK_FOR_INSN (from)))
4307 {
a813c111 4308 if (BB_END (bb2) == to)
1130d5e3 4309 BB_END (bb2) = prev;
6fb5fa3c 4310 df_set_bb_dirty (bb2);
3c030e88
JH
4311 }
4312
a813c111 4313 if (BB_END (bb) == after)
1130d5e3 4314 BB_END (bb) = to;
3c030e88
JH
4315
4316 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4317 if (!BARRIER_P (x))
63642d5a 4318 df_insn_change_bb (x, bb);
3c030e88
JH
4319 }
4320}
4321
23b2ce53 4322\f
2f937369
DM
4323/* Emit insn(s) of given code and pattern
4324 at a specified place within the doubly-linked list.
23b2ce53 4325
2f937369
DM
4326 All of the emit_foo global entry points accept an object
4327 X which is either an insn list or a PATTERN of a single
4328 instruction.
23b2ce53 4329
2f937369
DM
4330 There are thus a few canonical ways to generate code and
4331 emit it at a specific place in the instruction stream. For
4332 example, consider the instruction named SPOT and the fact that
4333 we would like to emit some instructions before SPOT. We might
4334 do it like this:
23b2ce53 4335
2f937369
DM
4336 start_sequence ();
4337 ... emit the new instructions ...
4338 insns_head = get_insns ();
4339 end_sequence ();
23b2ce53 4340
2f937369 4341 emit_insn_before (insns_head, SPOT);
23b2ce53 4342
2f937369
DM
4343 It used to be common to generate SEQUENCE rtl instead, but that
4344 is a relic of the past which no longer occurs. The reason is that
4345 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4346 generated would almost certainly die right after it was created. */
23b2ce53 4347
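/* A compilable version of the idiom above (illustrative only; the
   function name and the emitted move are hypothetical).  */

static void
emit_move_before_spot (rtx dest, rtx src, rtx_insn *spot)
{
  rtx_insn *insns_head;

  start_sequence ();
  emit_move_insn (dest, src);	/* ... emit the new instructions ...  */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}
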
cd459bf8 4348static rtx_insn *
5f02387d 4349emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
167b9fae 4350 rtx_insn *(*make_raw) (rtx))
23b2ce53 4351{
167b9fae 4352 rtx_insn *insn;
23b2ce53 4353
5b0264cb 4354 gcc_assert (before);
2f937369
DM
4355
4356 if (x == NULL_RTX)
cd459bf8 4357 return safe_as_a <rtx_insn *> (last);
2f937369
DM
4358
4359 switch (GET_CODE (x))
23b2ce53 4360 {
b5b8b0ac 4361 case DEBUG_INSN:
2f937369
DM
4362 case INSN:
4363 case JUMP_INSN:
4364 case CALL_INSN:
4365 case CODE_LABEL:
4366 case BARRIER:
4367 case NOTE:
167b9fae 4368 insn = as_a <rtx_insn *> (x);
2f937369
DM
4369 while (insn)
4370 {
167b9fae 4371 rtx_insn *next = NEXT_INSN (insn);
6fb5fa3c 4372 add_insn_before (insn, before, bb);
2f937369
DM
4373 last = insn;
4374 insn = next;
4375 }
4376 break;
4377
4378#ifdef ENABLE_RTL_CHECKING
4379 case SEQUENCE:
5b0264cb 4380 gcc_unreachable ();
2f937369
DM
4381 break;
4382#endif
4383
4384 default:
5f02387d 4385 last = (*make_raw) (x);
6fb5fa3c 4386 add_insn_before (last, before, bb);
2f937369 4387 break;
23b2ce53
RS
4388 }
4389
cd459bf8 4390 return safe_as_a <rtx_insn *> (last);
23b2ce53
RS
4391}
4392
5f02387d
NF
4393/* Make X be output before the instruction BEFORE. */
4394
cd459bf8 4395rtx_insn *
596f2b17 4396emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
5f02387d
NF
4397{
4398 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4399}
4400
2f937369 4401/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4402 and output it before the instruction BEFORE. */
4403
1476d1bd 4404rtx_jump_insn *
596f2b17 4405emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
23b2ce53 4406{
1476d1bd
MM
4407 return as_a <rtx_jump_insn *> (
4408 emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4409 make_jump_insn_raw));
23b2ce53
RS
4410}
4411
2f937369 4412/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4413 and output it before the instruction BEFORE. */
4414
cd459bf8 4415rtx_insn *
596f2b17 4416emit_call_insn_before_noloc (rtx x, rtx_insn *before)
969d70ca 4417{
5f02387d
NF
4418 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4419 make_call_insn_raw);
969d70ca
JH
4420}
4421
b5b8b0ac
AO
4422/* Make an instruction with body X and code DEBUG_INSN
4423 and output it before the instruction BEFORE. */
4424
cd459bf8 4425rtx_insn *
b5b8b0ac
AO
4426emit_debug_insn_before_noloc (rtx x, rtx before)
4427{
5f02387d
NF
4428 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4429 make_debug_insn_raw);
b5b8b0ac
AO
4430}
4431
23b2ce53 4432/* Make an insn of code BARRIER
e881bb1b 4433 and output it before the insn BEFORE. */
23b2ce53 4434
cd459bf8 4435rtx_barrier *
502b8322 4436emit_barrier_before (rtx before)
23b2ce53 4437{
cd459bf8 4438 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4439
4440 INSN_UID (insn) = cur_insn_uid++;
4441
6fb5fa3c 4442 add_insn_before (insn, before, NULL);
23b2ce53
RS
4443 return insn;
4444}
4445
e881bb1b
RH
4446/* Emit the label LABEL before the insn BEFORE. */
4447
1476d1bd 4448rtx_code_label *
596f2b17 4449emit_label_before (rtx label, rtx_insn *before)
e881bb1b 4450{
468660d3
SB
4451 gcc_checking_assert (INSN_UID (label) == 0);
4452 INSN_UID (label) = cur_insn_uid++;
4453 add_insn_before (label, before, NULL);
1476d1bd 4454 return as_a <rtx_code_label *> (label);
e881bb1b 4455}
23b2ce53 4456\f
2f937369
DM
4457/* Helper for emit_insn_after, handles lists of instructions
4458 efficiently. */
23b2ce53 4459
e6eda746
DM
4460static rtx_insn *
4461emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
23b2ce53 4462{
e6eda746 4463 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
1130d5e3
DM
4464 rtx_insn *last;
4465 rtx_insn *after_after;
6fb5fa3c
DB
4466 if (!bb && !BARRIER_P (after))
4467 bb = BLOCK_FOR_INSN (after);
23b2ce53 4468
6fb5fa3c 4469 if (bb)
23b2ce53 4470 {
6fb5fa3c 4471 df_set_bb_dirty (bb);
2f937369 4472 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4473 if (!BARRIER_P (last))
6fb5fa3c
DB
4474 {
4475 set_block_for_insn (last, bb);
4476 df_insn_rescan (last);
4477 }
4b4bf941 4478 if (!BARRIER_P (last))
6fb5fa3c
DB
4479 {
4480 set_block_for_insn (last, bb);
4481 df_insn_rescan (last);
4482 }
a813c111 4483 if (BB_END (bb) == after)
1130d5e3 4484 BB_END (bb) = last;
23b2ce53
RS
4485 }
4486 else
2f937369
DM
4487 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4488 continue;
4489
4490 after_after = NEXT_INSN (after);
4491
0f82e5c9
DM
4492 SET_NEXT_INSN (after) = first;
4493 SET_PREV_INSN (first) = after;
4494 SET_NEXT_INSN (last) = after_after;
2f937369 4495 if (after_after)
0f82e5c9 4496 SET_PREV_INSN (after_after) = last;
2f937369 4497
c3284718 4498 if (after == get_last_insn ())
5936d944 4499 set_last_insn (last);
e855c69d 4500
2f937369
DM
4501 return last;
4502}
4503
cd459bf8 4504static rtx_insn *
e6eda746 4505emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
167b9fae 4506 rtx_insn *(*make_raw)(rtx))
2f937369 4507{
e6eda746
DM
4508 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4509 rtx_insn *last = after;
2f937369 4510
5b0264cb 4511 gcc_assert (after);
2f937369
DM
4512
4513 if (x == NULL_RTX)
e6eda746 4514 return last;
2f937369
DM
4515
4516 switch (GET_CODE (x))
23b2ce53 4517 {
b5b8b0ac 4518 case DEBUG_INSN:
2f937369
DM
4519 case INSN:
4520 case JUMP_INSN:
4521 case CALL_INSN:
4522 case CODE_LABEL:
4523 case BARRIER:
4524 case NOTE:
1130d5e3 4525 last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
2f937369
DM
4526 break;
4527
4528#ifdef ENABLE_RTL_CHECKING
4529 case SEQUENCE:
5b0264cb 4530 gcc_unreachable ();
2f937369
DM
4531 break;
4532#endif
4533
4534 default:
5f02387d 4535 last = (*make_raw) (x);
6fb5fa3c 4536 add_insn_after (last, after, bb);
2f937369 4537 break;
23b2ce53
RS
4538 }
4539
e6eda746 4540 return last;
23b2ce53
RS
4541}
4542
5f02387d
NF
4543/* Make X be output after the insn AFTER and set the BB of insn. If
4544 BB is NULL, an attempt is made to infer the BB from AFTER. */
4545
cd459bf8 4546rtx_insn *
5f02387d
NF
4547emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4548{
4549 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4550}
4551
255680cf 4552
2f937369 4553/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4554 and output it after the insn AFTER. */
4555
1476d1bd 4556rtx_jump_insn *
a7102479 4557emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4558{
1476d1bd
MM
4559 return as_a <rtx_jump_insn *> (
4560 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
2f937369
DM
4561}
4562
4563/* Make an instruction with body X and code CALL_INSN
4564 and output it after the instruction AFTER. */
4565
cd459bf8 4566rtx_insn *
a7102479 4567emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4568{
5f02387d 4569 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4570}
4571
b5b8b0ac
AO
4572/* Make an instruction with body X and code DEBUG_INSN
4573 and output it after the instruction AFTER. */
4574
cd459bf8 4575rtx_insn *
b5b8b0ac
AO
4576emit_debug_insn_after_noloc (rtx x, rtx after)
4577{
5f02387d 4578 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4579}
4580
23b2ce53
RS
4581/* Make an insn of code BARRIER
4582 and output it after the insn AFTER. */
4583
cd459bf8 4584rtx_barrier *
502b8322 4585emit_barrier_after (rtx after)
23b2ce53 4586{
cd459bf8 4587 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
4588
4589 INSN_UID (insn) = cur_insn_uid++;
4590
6fb5fa3c 4591 add_insn_after (insn, after, NULL);
23b2ce53
RS
4592 return insn;
4593}
4594
4595/* Emit the label LABEL after the insn AFTER. */
4596
cd459bf8 4597rtx_insn *
596f2b17 4598emit_label_after (rtx label, rtx_insn *after)
23b2ce53 4599{
468660d3
SB
4600 gcc_checking_assert (INSN_UID (label) == 0);
4601 INSN_UID (label) = cur_insn_uid++;
4602 add_insn_after (label, after, NULL);
cd459bf8 4603 return as_a <rtx_insn *> (label);
23b2ce53 4604}
96fba521
SB
4605\f
4606/* Notes require a bit of special handling: Some notes need to have their
4607 BLOCK_FOR_INSN set, others should never have it set, and some should
4608 have it set or clear depending on the context. */
4609
4610/* Return true iff a note of kind SUBTYPE should be emitted with routines
4611 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4612 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4613
4614static bool
4615note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4616{
4617 switch (subtype)
4618 {
4619 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4620 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4621 return true;
4622
4623 /* Notes for var tracking and EH region markers can appear between or
4624 inside basic blocks. If the caller is emitting on the basic block
4625 boundary, do not set BLOCK_FOR_INSN on the new note. */
4626 case NOTE_INSN_VAR_LOCATION:
4627 case NOTE_INSN_CALL_ARG_LOCATION:
4628 case NOTE_INSN_EH_REGION_BEG:
4629 case NOTE_INSN_EH_REGION_END:
4630 return on_bb_boundary_p;
4631
4632 /* Otherwise, BLOCK_FOR_INSN must be set. */
4633 default:
4634 return false;
4635 }
4636}
23b2ce53
RS
4637
4638/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4639
66e8df53 4640rtx_note *
589e43f9 4641emit_note_after (enum insn_note subtype, rtx_insn *after)
23b2ce53 4642{
66e8df53 4643 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4644 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4645 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4646
4647 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4648 add_insn_after_nobb (note, after);
4649 else
4650 add_insn_after (note, after, bb);
4651 return note;
4652}
4653
4654/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4655
66e8df53 4656rtx_note *
89b6250d 4657emit_note_before (enum insn_note subtype, rtx_insn *before)
96fba521 4658{
66e8df53 4659 rtx_note *note = make_note_raw (subtype);
96fba521
SB
4660 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4661 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4662
4663 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4664 add_insn_before_nobb (note, before);
4665 else
4666 add_insn_before (note, before, bb);
23b2ce53
RS
4667 return note;
4668}
23b2ce53 4669\f
e8110d6f
NF
4670/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4671 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4672
cd459bf8 4673static rtx_insn *
dc01c3d1 4674emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
167b9fae 4675 rtx_insn *(*make_raw) (rtx))
0d682900 4676{
dc01c3d1 4677 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
e67d1102 4678 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4679
a7102479 4680 if (pattern == NULL_RTX || !loc)
e67d1102 4681 return last;
dd3adcf8 4682
2f937369
DM
4683 after = NEXT_INSN (after);
4684 while (1)
4685 {
20d4397a
EB
4686 if (active_insn_p (after)
4687 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4688 && !INSN_LOCATION (after))
5368224f 4689 INSN_LOCATION (after) = loc;
2f937369
DM
4690 if (after == last)
4691 break;
4692 after = NEXT_INSN (after);
4693 }
e67d1102 4694 return last;
0d682900
JH
4695}
4696
e8110d6f
NF
4697/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4698 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4699 any DEBUG_INSNs. */
4700
cd459bf8 4701static rtx_insn *
dc01c3d1 4702emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
167b9fae 4703 rtx_insn *(*make_raw) (rtx))
a7102479 4704{
dc01c3d1
DM
4705 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4706 rtx_insn *prev = after;
b5b8b0ac 4707
e8110d6f
NF
4708 if (skip_debug_insns)
4709 while (DEBUG_INSN_P (prev))
4710 prev = PREV_INSN (prev);
b5b8b0ac
AO
4711
4712 if (INSN_P (prev))
5368224f 4713 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
e8110d6f 4714 make_raw);
a7102479 4715 else
e8110d6f 4716 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4717}
4718
5368224f 4719/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4720rtx_insn *
e8110d6f 4721emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4722{
e8110d6f
NF
4723 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4724}
2f937369 4725
5368224f 4726/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4727rtx_insn *
e8110d6f
NF
4728emit_insn_after (rtx pattern, rtx after)
4729{
4730 return emit_pattern_after (pattern, after, true, make_insn_raw);
4731}
dd3adcf8 4732
5368224f 4733/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4734rtx_jump_insn *
e8110d6f
NF
4735emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4736{
1476d1bd
MM
4737 return as_a <rtx_jump_insn *> (
4738 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
0d682900
JH
4739}
4740
5368224f 4741/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
1476d1bd 4742rtx_jump_insn *
a7102479
JH
4743emit_jump_insn_after (rtx pattern, rtx after)
4744{
1476d1bd
MM
4745 return as_a <rtx_jump_insn *> (
4746 emit_pattern_after (pattern, after, true, make_jump_insn_raw));
a7102479
JH
4747}
4748
5368224f 4749/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4750rtx_insn *
502b8322 4751emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4752{
e8110d6f 4753 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4754}
4755
5368224f 4756/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4757rtx_insn *
a7102479
JH
4758emit_call_insn_after (rtx pattern, rtx after)
4759{
e8110d6f 4760 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4761}
4762
5368224f 4763/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4764rtx_insn *
b5b8b0ac
AO
4765emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4766{
e8110d6f 4767 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4768}
4769
5368224f 4770/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4771rtx_insn *
b5b8b0ac
AO
4772emit_debug_insn_after (rtx pattern, rtx after)
4773{
e8110d6f 4774 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4775}
4776
e8110d6f
NF
4777/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4778 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4779 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4780 CALL_INSN, etc. */
4781
cd459bf8 4782static rtx_insn *
dc01c3d1 4783emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
167b9fae 4784 rtx_insn *(*make_raw) (rtx))
0d682900 4785{
dc01c3d1
DM
4786 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4787 rtx_insn *first = PREV_INSN (before);
4788 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4789 insnp ? before : NULL_RTX,
4790 NULL, make_raw);
a7102479
JH
4791
4792 if (pattern == NULL_RTX || !loc)
dc01c3d1 4793 return last;
a7102479 4794
26cb3993
JH
4795 if (!first)
4796 first = get_insns ();
4797 else
4798 first = NEXT_INSN (first);
a7102479
JH
4799 while (1)
4800 {
20d4397a
EB
4801 if (active_insn_p (first)
4802 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4803 && !INSN_LOCATION (first))
5368224f 4804 INSN_LOCATION (first) = loc;
a7102479
JH
4805 if (first == last)
4806 break;
4807 first = NEXT_INSN (first);
4808 }
dc01c3d1 4809 return last;
a7102479
JH
4810}
4811
e8110d6f
NF
4812/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4813 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4814 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4815 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4816
cd459bf8 4817static rtx_insn *
dc01c3d1 4818emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
167b9fae 4819 bool insnp, rtx_insn *(*make_raw) (rtx))
a7102479 4820{
dc01c3d1
DM
4821 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4822 rtx_insn *next = before;
b5b8b0ac 4823
e8110d6f
NF
4824 if (skip_debug_insns)
4825 while (DEBUG_INSN_P (next))
4826 next = PREV_INSN (next);
b5b8b0ac
AO
4827
4828 if (INSN_P (next))
5368224f 4829 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
e8110d6f 4830 insnp, make_raw);
a7102479 4831 else
e8110d6f 4832 return emit_pattern_before_noloc (pattern, before,
9b2ea071 4833 insnp ? before : NULL_RTX,
e8110d6f 4834 NULL, make_raw);
a7102479
JH
4835}
4836
5368224f 4837/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4838rtx_insn *
596f2b17 4839emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4840{
e8110d6f
NF
4841 return emit_pattern_before_setloc (pattern, before, loc, true,
4842 make_insn_raw);
4843}
a7102479 4844
5368224f 4845/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
cd459bf8 4846rtx_insn *
e8110d6f
NF
4847emit_insn_before (rtx pattern, rtx before)
4848{
4849 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4850}
a7102479 4851
5368224f 4852/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4853rtx_jump_insn *
596f2b17 4854emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
e8110d6f 4855{
1476d1bd
MM
4856 return as_a <rtx_jump_insn *> (
4857 emit_pattern_before_setloc (pattern, before, loc, false,
4858 make_jump_insn_raw));
a7102479
JH
4859}
4860
5368224f 4861/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
1476d1bd 4862rtx_jump_insn *
a7102479
JH
4863emit_jump_insn_before (rtx pattern, rtx before)
4864{
1476d1bd
MM
4865 return as_a <rtx_jump_insn *> (
4866 emit_pattern_before (pattern, before, true, false,
4867 make_jump_insn_raw));
a7102479
JH
4868}
4869
5368224f 4870/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4871rtx_insn *
596f2b17 4872emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4873{
e8110d6f
NF
4874 return emit_pattern_before_setloc (pattern, before, loc, false,
4875 make_call_insn_raw);
0d682900 4876}
a7102479 4877
e8110d6f 4878/* Like emit_call_insn_before_noloc,
5368224f 4879 but set insn_location according to BEFORE. */
cd459bf8 4880rtx_insn *
596f2b17 4881emit_call_insn_before (rtx pattern, rtx_insn *before)
a7102479 4882{
e8110d6f
NF
4883 return emit_pattern_before (pattern, before, true, false,
4884 make_call_insn_raw);
a7102479 4885}
b5b8b0ac 4886
5368224f 4887/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4888rtx_insn *
b5b8b0ac
AO
4889emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4890{
e8110d6f
NF
4891 return emit_pattern_before_setloc (pattern, before, loc, false,
4892 make_debug_insn_raw);
b5b8b0ac
AO
4893}
4894
e8110d6f 4895/* Like emit_debug_insn_before_noloc,
5368224f 4896 but set insn_location according to BEFORE. */
cd459bf8 4897rtx_insn *
3a6216b0 4898emit_debug_insn_before (rtx pattern, rtx_insn *before)
b5b8b0ac 4899{
e8110d6f
NF
4900 return emit_pattern_before (pattern, before, false, false,
4901 make_debug_insn_raw);
b5b8b0ac 4902}
0d682900 4903\f
2f937369
DM
4904/* Take X and emit it at the end of the doubly-linked
4905 INSN list.
23b2ce53
RS
4906
4907 Returns the last insn emitted. */
4908
cd459bf8 4909rtx_insn *
502b8322 4910emit_insn (rtx x)
23b2ce53 4911{
cd459bf8
DM
4912 rtx_insn *last = get_last_insn ();
4913 rtx_insn *insn;
23b2ce53 4914
2f937369
DM
4915 if (x == NULL_RTX)
4916 return last;
23b2ce53 4917
2f937369
DM
4918 switch (GET_CODE (x))
4919 {
b5b8b0ac 4920 case DEBUG_INSN:
2f937369
DM
4921 case INSN:
4922 case JUMP_INSN:
4923 case CALL_INSN:
4924 case CODE_LABEL:
4925 case BARRIER:
4926 case NOTE:
cd459bf8 4927 insn = as_a <rtx_insn *> (x);
2f937369 4928 while (insn)
23b2ce53 4929 {
cd459bf8 4930 rtx_insn *next = NEXT_INSN (insn);
23b2ce53 4931 add_insn (insn);
2f937369
DM
4932 last = insn;
4933 insn = next;
23b2ce53 4934 }
2f937369 4935 break;
23b2ce53 4936
2f937369 4937#ifdef ENABLE_RTL_CHECKING
39718607 4938 case JUMP_TABLE_DATA:
2f937369 4939 case SEQUENCE:
5b0264cb 4940 gcc_unreachable ();
2f937369
DM
4941 break;
4942#endif
23b2ce53 4943
2f937369
DM
4944 default:
4945 last = make_insn_raw (x);
4946 add_insn (last);
4947 break;
23b2ce53
RS
4948 }
4949
4950 return last;
4951}
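
/* Illustrative sketch (not part of emit-rtl.c; the function name is
   hypothetical): emit_insn accepts either a bare pattern, which it
   wraps in a fresh INSN, or an already-built insn list, per the
   canonical-usage comment further above.  */

static void
emit_set_two_ways (rtx dest, rtx src)
{
  /* 1. Hand emit_insn a pattern; make_insn_raw wraps it.  */
  emit_insn (gen_rtx_SET (dest, src));

  /* 2. Hand it a detached insn list built inside a sequence.  */
  start_sequence ();
  emit_move_insn (dest, src);
  rtx_insn *list = get_insns ();
  end_sequence ();
  emit_insn (list);
}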
4952
b5b8b0ac
AO
4953/* Make an insn of code DEBUG_INSN with pattern X
4954 and add it to the end of the doubly-linked list. */
4955
cd459bf8 4956rtx_insn *
b5b8b0ac
AO
4957emit_debug_insn (rtx x)
4958{
cd459bf8
DM
4959 rtx_insn *last = get_last_insn ();
4960 rtx_insn *insn;
b5b8b0ac
AO
4961
4962 if (x == NULL_RTX)
4963 return last;
4964
4965 switch (GET_CODE (x))
4966 {
4967 case DEBUG_INSN:
4968 case INSN:
4969 case JUMP_INSN:
4970 case CALL_INSN:
4971 case CODE_LABEL:
4972 case BARRIER:
4973 case NOTE:
cd459bf8 4974 insn = as_a <rtx_insn *> (x);
b5b8b0ac
AO
4975 while (insn)
4976 {
cd459bf8 4977 rtx_insn *next = NEXT_INSN (insn);
b5b8b0ac
AO
4978 add_insn (insn);
4979 last = insn;
4980 insn = next;
4981 }
4982 break;
4983
4984#ifdef ENABLE_RTL_CHECKING
39718607 4985 case JUMP_TABLE_DATA:
b5b8b0ac
AO
4986 case SEQUENCE:
4987 gcc_unreachable ();
4988 break;
4989#endif
4990
4991 default:
4992 last = make_debug_insn_raw (x);
4993 add_insn (last);
4994 break;
4995 }
4996
4997 return last;
4998}
4999
2f937369
DM
5000/* Make an insn of code JUMP_INSN with pattern X
5001 and add it to the end of the doubly-linked list. */
23b2ce53 5002
cd459bf8 5003rtx_insn *
502b8322 5004emit_jump_insn (rtx x)
23b2ce53 5005{
cd459bf8
DM
5006 rtx_insn *last = NULL;
5007 rtx_insn *insn;
23b2ce53 5008
2f937369 5009 switch (GET_CODE (x))
23b2ce53 5010 {
b5b8b0ac 5011 case DEBUG_INSN:
2f937369
DM
5012 case INSN:
5013 case JUMP_INSN:
5014 case CALL_INSN:
5015 case CODE_LABEL:
5016 case BARRIER:
5017 case NOTE:
cd459bf8 5018 insn = as_a <rtx_insn *> (x);
2f937369
DM
5019 while (insn)
5020 {
cd459bf8 5021 rtx_insn *next = NEXT_INSN (insn);
2f937369
DM
5022 add_insn (insn);
5023 last = insn;
5024 insn = next;
5025 }
5026 break;
e0a5c5eb 5027
2f937369 5028#ifdef ENABLE_RTL_CHECKING
39718607 5029 case JUMP_TABLE_DATA:
2f937369 5030 case SEQUENCE:
5b0264cb 5031 gcc_unreachable ();
2f937369
DM
5032 break;
5033#endif
e0a5c5eb 5034
2f937369
DM
5035 default:
5036 last = make_jump_insn_raw (x);
5037 add_insn (last);
5038 break;
3c030e88 5039 }
e0a5c5eb
RS
5040
5041 return last;
5042}
5043
2f937369 5044/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
5045 and add it to the end of the doubly-linked list. */
5046
cd459bf8 5047rtx_insn *
502b8322 5048emit_call_insn (rtx x)
23b2ce53 5049{
cd459bf8 5050 rtx_insn *insn;
2f937369
DM
5051
5052 switch (GET_CODE (x))
23b2ce53 5053 {
b5b8b0ac 5054 case DEBUG_INSN:
2f937369
DM
5055 case INSN:
5056 case JUMP_INSN:
5057 case CALL_INSN:
5058 case CODE_LABEL:
5059 case BARRIER:
5060 case NOTE:
5061 insn = emit_insn (x);
5062 break;
23b2ce53 5063
2f937369
DM
5064#ifdef ENABLE_RTL_CHECKING
5065 case SEQUENCE:
39718607 5066 case JUMP_TABLE_DATA:
5b0264cb 5067 gcc_unreachable ();
2f937369
DM
5068 break;
5069#endif
23b2ce53 5070
2f937369
DM
5071 default:
5072 insn = make_call_insn_raw (x);
23b2ce53 5073 add_insn (insn);
2f937369 5074 break;
23b2ce53 5075 }
2f937369
DM
5076
5077 return insn;
23b2ce53
RS
5078}
5079
5080/* Add the label LABEL to the end of the doubly-linked list. */
5081
1476d1bd
MM
5082rtx_code_label *
5083emit_label (rtx uncast_label)
23b2ce53 5084{
1476d1bd
MM
5085 rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5086
468660d3
SB
5087 gcc_checking_assert (INSN_UID (label) == 0);
5088 INSN_UID (label) = cur_insn_uid++;
1476d1bd
MM
5089 add_insn (label);
5090 return label;
23b2ce53
RS
5091}
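
/* Usage sketch (illustrative, not part of this file): a label only
   enters the insn chain once emit_label is called, so an expander
   typically creates it up front, emits the branch, then places it:

       rtx_code_label *over = gen_label_rtx ();
       emit_cmp_and_jump_insns (op0, const0_rtx, EQ, NULL_RTX,
                                word_mode, 0, over);
       ... emit the insns to be skipped ...
       emit_label (over);

   OP0 stands for a word_mode rtx built by the caller;
   emit_cmp_and_jump_insns is the usual optabs helper for the branch.  */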
5092
39718607
SB
5093/* Make an insn of code JUMP_TABLE_DATA
5094 and add it to the end of the doubly-linked list. */
5095
4598afdd 5096rtx_jump_table_data *
39718607
SB
5097emit_jump_table_data (rtx table)
5098{
4598afdd
DM
5099 rtx_jump_table_data *jump_table_data =
5100 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
39718607
SB
5101 INSN_UID (jump_table_data) = cur_insn_uid++;
5102 PATTERN (jump_table_data) = table;
5103 BLOCK_FOR_INSN (jump_table_data) = NULL;
5104 add_insn (jump_table_data);
5105 return jump_table_data;
5106}
5107
23b2ce53
RS
5108/* Make an insn of code BARRIER
5109 and add it to the end of the doubly-linked list. */
5110
cd459bf8 5111rtx_barrier *
502b8322 5112emit_barrier (void)
23b2ce53 5113{
cd459bf8 5114 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
5115 INSN_UID (barrier) = cur_insn_uid++;
5116 add_insn (barrier);
5117 return barrier;
5118}
5119
5f2fc772 5120/* Emit a copy of note ORIG. */
502b8322 5121
66e8df53
DM
5122rtx_note *
5123emit_note_copy (rtx_note *orig)
5f2fc772 5124{
96fba521 5125 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
66e8df53 5126 rtx_note *note = make_note_raw (kind);
5f2fc772 5127 NOTE_DATA (note) = NOTE_DATA (orig);
5f2fc772 5128 add_insn (note);
2e040219 5129 return note;
23b2ce53
RS
5130}
5131
2e040219
NS
5132/* Make an insn of code NOTE with kind KIND
5133 and add it to the end of the doubly-linked list. */
23b2ce53 5134
66e8df53 5135rtx_note *
a38e7aa5 5136emit_note (enum insn_note kind)
23b2ce53 5137{
66e8df53 5138 rtx_note *note = make_note_raw (kind);
23b2ce53
RS
5139 add_insn (note);
5140 return note;
5141}
5142
c41c1387
RS
5143/* Emit a clobber of lvalue X. */
5144
cd459bf8 5145rtx_insn *
c41c1387
RS
5146emit_clobber (rtx x)
5147{
5148 /* CONCATs should not appear in the insn stream. */
5149 if (GET_CODE (x) == CONCAT)
5150 {
5151 emit_clobber (XEXP (x, 0));
5152 return emit_clobber (XEXP (x, 1));
5153 }
5154 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5155}
5156
5157/* Return a sequence of insns to clobber lvalue X. */
5158
cd459bf8 5159rtx_insn *
c41c1387
RS
5160gen_clobber (rtx x)
5161{
cd459bf8 5162 rtx_insn *seq;
c41c1387
RS
5163
5164 start_sequence ();
5165 emit_clobber (x);
5166 seq = get_insns ();
5167 end_sequence ();
5168 return seq;
5169}
5170
5171/* Emit a use of rvalue X. */
5172
cd459bf8 5173rtx_insn *
c41c1387
RS
5174emit_use (rtx x)
5175{
5176 /* CONCATs should not appear in the insn stream. */
5177 if (GET_CODE (x) == CONCAT)
5178 {
5179 emit_use (XEXP (x, 0));
5180 return emit_use (XEXP (x, 1));
5181 }
5182 return emit_insn (gen_rtx_USE (VOIDmode, x));
5183}
5184
5185/* Return a sequence of insns to use rvalue X. */
5186
cd459bf8 5187rtx_insn *
c41c1387
RS
5188gen_use (rtx x)
5189{
cd459bf8 5190 rtx_insn *seq;
c41c1387
RS
5191
5192 start_sequence ();
5193 emit_use (x);
5194 seq = get_insns ();
5195 end_sequence ();
5196 return seq;
5197}
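
/* Usage sketch (illustrative): gen_clobber and gen_use build a detached
   sequence rather than emitting at the current position, which is handy
   when the USE or CLOBBER is destined for some other point in the chain:

       rtx reg = gen_reg_rtx (word_mode);
       rtx_insn *seq = gen_use (reg);
       emit_insn_after (seq, some_insn);

   SOME_INSN stands for an existing insn supplied by the caller.  */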
5198
c8912e53
RS
5199/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5200 Return the set in INSN that such notes describe, or NULL if the notes
5201 have no meaning for INSN. */
5202
5203rtx
5204set_for_reg_notes (rtx insn)
5205{
5206 rtx pat, reg;
5207
5208 if (!INSN_P (insn))
5209 return NULL_RTX;
5210
5211 pat = PATTERN (insn);
5212 if (GET_CODE (pat) == PARALLEL)
5213 {
5214 /* We do not use single_set because that ignores SETs of unused
5215 registers. REG_EQUAL and REG_EQUIV notes really do require the
5216 PARALLEL to have a single SET. */
5217 if (multiple_sets (insn))
5218 return NULL_RTX;
5219 pat = XVECEXP (pat, 0, 0);
5220 }
5221
5222 if (GET_CODE (pat) != SET)
5223 return NULL_RTX;
5224
5225 reg = SET_DEST (pat);
5226
5227 /* Notes apply to the contents of a STRICT_LOW_PART. */
7f7379f6
KV
5228 if (GET_CODE (reg) == STRICT_LOW_PART
5229 || GET_CODE (reg) == ZERO_EXTRACT)
c8912e53
RS
5230 reg = XEXP (reg, 0);
5231
5232 /* Check that we have a register. */
5233 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5234 return NULL_RTX;
5235
5236 return pat;
5237}
5238
87b47c85 5239/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 5240 note of this type already exists, its datum is replaced. */
87b47c85 5241
3d238248 5242rtx
502b8322 5243set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
5244{
5245 rtx note = find_reg_note (insn, kind, NULL_RTX);
5246
52488da1
JW
5247 switch (kind)
5248 {
5249 case REG_EQUAL:
5250 case REG_EQUIV:
8073cbd4
EB
5251 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5252 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
c8912e53 5253 return NULL_RTX;
52488da1
JW
5254
5255 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5256 It serves no useful purpose and breaks eliminate_regs. */
5257 if (GET_CODE (datum) == ASM_OPERANDS)
5258 return NULL_RTX;
109374e2
RS
5259
5260 /* Notes with side effects are dangerous. Even if the side-effect
5261 initially mirrors one in PATTERN (INSN), later optimizations
5262 might alter the way that the final register value is calculated
5263 and so move or alter the side-effect in some way. The note would
5264 then no longer be a valid substitution for SET_SRC. */
5265 if (side_effects_p (datum))
5266 return NULL_RTX;
52488da1
JW
5267 break;
5268
5269 default:
5270 break;
5271 }
3d238248 5272
c8912e53
RS
5273 if (note)
5274 XEXP (note, 0) = datum;
5275 else
5276 {
5277 add_reg_note (insn, kind, datum);
5278 note = REG_NOTES (insn);
5279 }
6fb5fa3c
DB
5280
5281 switch (kind)
3d238248 5282 {
6fb5fa3c
DB
5283 case REG_EQUAL:
5284 case REG_EQUIV:
b2908ba6 5285 df_notes_rescan (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
5286 break;
5287 default:
5288 break;
3d238248 5289 }
87b47c85 5290
c8912e53 5291 return note;
87b47c85 5292}
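
/* Usage sketch (illustrative): after a multi-insn expansion that leaves
   its result in TARGET, a REG_EQUAL note lets later passes substitute
   the simpler equivalent form:

       rtx_insn *last = emit_move_insn (target, complicated_rtx);
       set_unique_reg_note (last, REG_EQUAL,
                            gen_rtx_MULT (mode, op0, op1));

   TARGET, COMPLICATED_RTX, OP0 and OP1 are placeholders for rtxes
   built by the caller.  */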
7543f918
JR
5293
5294/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5295rtx
5296set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5297{
c8912e53 5298 rtx set = set_for_reg_notes (insn);
7543f918
JR
5299
5300 if (set && SET_DEST (set) == dst)
5301 return set_unique_reg_note (insn, kind, datum);
5302 return NULL_RTX;
5303}
23b2ce53 5304\f
9d8895c9
RS
5305/* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5306 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5307 is true.
5308
23b2ce53
RS
5309 If X is a label, it is simply added into the insn chain. */
5310
cd459bf8 5311rtx_insn *
9d8895c9 5312emit (rtx x, bool allow_barrier_p)
23b2ce53
RS
5313{
5314 enum rtx_code code = classify_insn (x);
5315
5b0264cb 5316 switch (code)
23b2ce53 5317 {
5b0264cb
NS
5318 case CODE_LABEL:
5319 return emit_label (x);
5320 case INSN:
5321 return emit_insn (x);
5322 case JUMP_INSN:
5323 {
cd459bf8 5324 rtx_insn *insn = emit_jump_insn (x);
9d8895c9
RS
5325 if (allow_barrier_p
5326 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5b0264cb
NS
5327 return emit_barrier ();
5328 return insn;
5329 }
5330 case CALL_INSN:
5331 return emit_call_insn (x);
b5b8b0ac
AO
5332 case DEBUG_INSN:
5333 return emit_debug_insn (x);
5b0264cb
NS
5334 default:
5335 gcc_unreachable ();
23b2ce53 5336 }
23b2ce53
RS
5337}
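
/* Usage sketch (illustrative): emit dispatches on classify_insn, so a
   caller holding an arbitrary pattern need not know which kind of insn
   it will become:

       rtx pat = gen_rtx_USE (VOIDmode, some_reg);
       rtx_insn *insn = emit (pat, true);

   With ALLOW_BARRIER_P true, an unconditional jump pattern also gets
   its trailing barrier, which callers of emit_jump_insn otherwise add
   by hand.  SOME_REG is a placeholder rtx.  */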
5338\f
e2500fed 5339/* Space for free sequence stack entries. */
1431042e 5340static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5341
4dfa0342
RH
5342/* Begin emitting insns to a sequence. If this sequence will contain
5343 something that might cause the compiler to pop arguments to function
5344 calls (because those pops have previously been deferred; see
5345 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5346 before calling this function. That will ensure that the deferred
5347 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5348
5349void
502b8322 5350start_sequence (void)
23b2ce53
RS
5351{
5352 struct sequence_stack *tem;
5353
e2500fed
GK
5354 if (free_sequence_stack != NULL)
5355 {
5356 tem = free_sequence_stack;
5357 free_sequence_stack = tem->next;
5358 }
5359 else
766090c2 5360 tem = ggc_alloc<sequence_stack> ();
23b2ce53 5361
614d5bd8 5362 tem->next = get_current_sequence ()->next;
5936d944
JH
5363 tem->first = get_insns ();
5364 tem->last = get_last_insn ();
614d5bd8 5365 get_current_sequence ()->next = tem;
23b2ce53 5366
5936d944
JH
5367 set_first_insn (0);
5368 set_last_insn (0);
23b2ce53
RS
5369}
5370
5c7a310f
MM
5371/* Set up the insn chain starting with FIRST as the current sequence,
5372 saving the previously current one. See the documentation for
5373 start_sequence for more information about how to use this function. */
23b2ce53
RS
5374
5375void
fee3e72c 5376push_to_sequence (rtx_insn *first)
23b2ce53 5377{
fee3e72c 5378 rtx_insn *last;
23b2ce53
RS
5379
5380 start_sequence ();
5381
e84a58ff
EB
5382 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5383 ;
23b2ce53 5384
5936d944
JH
5385 set_first_insn (first);
5386 set_last_insn (last);
23b2ce53
RS
5387}
5388
bb27eeda
SE
5389/* Like push_to_sequence, but take the last insn as an argument to avoid
5390 looping through the list. */
5391
5392void
fee3e72c 5393push_to_sequence2 (rtx_insn *first, rtx_insn *last)
bb27eeda
SE
5394{
5395 start_sequence ();
5396
5936d944
JH
5397 set_first_insn (first);
5398 set_last_insn (last);
bb27eeda
SE
5399}
5400
f15ae3a1
TW
5401/* Set up the outer-level insn chain
5402 as the current sequence, saving the previously current one. */
5403
5404void
502b8322 5405push_topmost_sequence (void)
f15ae3a1 5406{
614d5bd8 5407 struct sequence_stack *top;
f15ae3a1
TW
5408
5409 start_sequence ();
5410
614d5bd8 5411 top = get_topmost_sequence ();
5936d944
JH
5412 set_first_insn (top->first);
5413 set_last_insn (top->last);
f15ae3a1
TW
5414}
5415
5416/* After emitting to the outer-level insn chain, update the outer-level
5417 insn chain, and restore the previous saved state. */
5418
5419void
502b8322 5420pop_topmost_sequence (void)
f15ae3a1 5421{
614d5bd8 5422 struct sequence_stack *top;
f15ae3a1 5423
614d5bd8 5424 top = get_topmost_sequence ();
5936d944
JH
5425 top->first = get_insns ();
5426 top->last = get_last_insn ();
f15ae3a1
TW
5427
5428 end_sequence ();
5429}
5430
23b2ce53
RS
5431/* After emitting to a sequence, restore the previously saved state.
5432
5c7a310f 5433 To get the contents of the sequence just made, you must call
2f937369 5434 `get_insns' *before* calling here.
5c7a310f
MM
5435
5436 If the compiler might have deferred popping arguments while
5437 generating this sequence, and this sequence will not be immediately
5438 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5439 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5440 pops are inserted into this sequence, and not into some random
5441 location in the instruction stream. See INHIBIT_DEFER_POP for more
5442 information about deferred popping of arguments. */
23b2ce53
RS
5443
5444void
502b8322 5445end_sequence (void)
23b2ce53 5446{
614d5bd8 5447 struct sequence_stack *tem = get_current_sequence ()->next;
23b2ce53 5448
5936d944
JH
5449 set_first_insn (tem->first);
5450 set_last_insn (tem->last);
614d5bd8 5451 get_current_sequence ()->next = tem->next;
23b2ce53 5452
e2500fed
GK
5453 memset (tem, 0, sizeof (*tem));
5454 tem->next = free_sequence_stack;
5455 free_sequence_stack = tem;
23b2ce53
RS
5456}
5457
5458/* Return 1 if currently emitting into a sequence. */
5459
5460int
502b8322 5461in_sequence_p (void)
23b2ce53 5462{
614d5bd8 5463 return get_current_sequence ()->next != 0;
23b2ce53 5464}
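
/* Usage sketch (illustrative): the canonical pattern for building a
   detached sequence with the functions above:

       rtx_insn *seq;
       start_sequence ();
       emit_move_insn (dest, src);
       ... more emit_* calls ...
       seq = get_insns ();
       end_sequence ();

   SEQ can then be inserted wherever needed, e.g. with
   emit_insn_before (seq, insn).  DEST and SRC are placeholder rtxes.  */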
23b2ce53 5465\f
59ec66dc
MM
5466/* Put the various virtual registers into REGNO_REG_RTX. */
5467
2bbdec73 5468static void
bd60bab2 5469init_virtual_regs (void)
59ec66dc 5470{
bd60bab2
JH
5471 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5472 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5473 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5474 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5475 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5476 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5477 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5478}
5479
da43a810
BS
5480\f
5481/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5482static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5483static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5484static int copy_insn_n_scratches;
5485
5486/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5487 copied an ASM_OPERANDS.
5488 In that case, it is the original input-operand vector. */
5489static rtvec orig_asm_operands_vector;
5490
5491/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5492 copied an ASM_OPERANDS.
5493 In that case, it is the copied input-operand vector. */
5494static rtvec copy_asm_operands_vector;
5495
5496/* Likewise for the constraints vector. */
5497static rtvec orig_asm_constraints_vector;
5498static rtvec copy_asm_constraints_vector;
5499
5500/* Recursively create a new copy of an rtx for copy_insn.
5501 This function differs from copy_rtx in that it handles SCRATCHes and
5502 ASM_OPERANDs properly.
5503 Normally, this function is not used directly; use copy_insn as front end.
5504 However, you could first copy an insn pattern with copy_insn and then use
5505 this function afterwards to properly copy any REG_NOTEs containing
5506 SCRATCHes. */
5507
5508rtx
502b8322 5509copy_insn_1 (rtx orig)
da43a810 5510{
b3694847
SS
5511 rtx copy;
5512 int i, j;
5513 RTX_CODE code;
5514 const char *format_ptr;
da43a810 5515
cd9c1ca8
RH
5516 if (orig == NULL)
5517 return NULL;
5518
da43a810
BS
5519 code = GET_CODE (orig);
5520
5521 switch (code)
5522 {
5523 case REG:
a52a87c3 5524 case DEBUG_EXPR:
d8116890 5525 CASE_CONST_ANY:
da43a810
BS
5526 case SYMBOL_REF:
5527 case CODE_LABEL:
5528 case PC:
5529 case CC0:
276e0224 5530 case RETURN:
26898771 5531 case SIMPLE_RETURN:
da43a810 5532 return orig;
3e89ed8d 5533 case CLOBBER:
c5c5ba89
JH
5534 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5535 clobbers or clobbers of hard registers that originated as pseudos.
5536 This is needed to allow safe register renaming. */
d7ae3739
EB
5537 if (REG_P (XEXP (orig, 0))
5538 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5539 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
3e89ed8d
JH
5540 return orig;
5541 break;
da43a810
BS
5542
5543 case SCRATCH:
5544 for (i = 0; i < copy_insn_n_scratches; i++)
5545 if (copy_insn_scratch_in[i] == orig)
5546 return copy_insn_scratch_out[i];
5547 break;
5548
5549 case CONST:
6fb5fa3c 5550 if (shared_const_p (orig))
da43a810
BS
5551 return orig;
5552 break;
750c9258 5553
da43a810
BS
5554 /* A MEM with a constant address is not sharable. The problem is that
5555 the constant address may need to be reloaded. If the mem is shared,
5556 then reloading one copy of this mem will cause all copies to appear
5557 to have been reloaded. */
5558
5559 default:
5560 break;
5561 }
5562
aacd3885
RS
5563 /* Copy the various flags, fields, and other information. We assume
5564 that all fields need copying, and then clear the fields that should
da43a810
BS
5565 not be copied. That is the sensible default behavior, and forces
5566 us to explicitly document why we are *not* copying a flag. */
aacd3885 5567 copy = shallow_copy_rtx (orig);
da43a810 5568
da43a810 5569 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 5570 if (INSN_P (orig))
da43a810 5571 {
2adc7f12
JJ
5572 RTX_FLAG (copy, jump) = 0;
5573 RTX_FLAG (copy, call) = 0;
5574 RTX_FLAG (copy, frame_related) = 0;
da43a810 5575 }
750c9258 5576
da43a810
BS
5577 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5578
5579 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
5580 switch (*format_ptr++)
5581 {
5582 case 'e':
5583 if (XEXP (orig, i) != NULL)
5584 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5585 break;
da43a810 5586
aacd3885
RS
5587 case 'E':
5588 case 'V':
5589 if (XVEC (orig, i) == orig_asm_constraints_vector)
5590 XVEC (copy, i) = copy_asm_constraints_vector;
5591 else if (XVEC (orig, i) == orig_asm_operands_vector)
5592 XVEC (copy, i) = copy_asm_operands_vector;
5593 else if (XVEC (orig, i) != NULL)
5594 {
5595 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5596 for (j = 0; j < XVECLEN (copy, i); j++)
5597 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5598 }
5599 break;
da43a810 5600
aacd3885
RS
5601 case 't':
5602 case 'w':
5603 case 'i':
5604 case 's':
5605 case 'S':
5606 case 'u':
5607 case '0':
5608 /* These are left unchanged. */
5609 break;
da43a810 5610
aacd3885
RS
5611 default:
5612 gcc_unreachable ();
5613 }
da43a810
BS
5614
5615 if (code == SCRATCH)
5616 {
5617 i = copy_insn_n_scratches++;
5b0264cb 5618 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
5619 copy_insn_scratch_in[i] = orig;
5620 copy_insn_scratch_out[i] = copy;
5621 }
5622 else if (code == ASM_OPERANDS)
5623 {
6462bb43
AO
5624 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5625 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5626 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5627 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
5628 }
5629
5630 return copy;
5631}
5632
5633/* Create a new copy of an rtx.
5634 This function differs from copy_rtx in that it handles SCRATCHes and
5635 ASM_OPERANDs properly.
5636 INSN doesn't really have to be a full INSN; it could be just the
5637 pattern. */
5638rtx
502b8322 5639copy_insn (rtx insn)
da43a810
BS
5640{
5641 copy_insn_n_scratches = 0;
5642 orig_asm_operands_vector = 0;
5643 orig_asm_constraints_vector = 0;
5644 copy_asm_operands_vector = 0;
5645 copy_asm_constraints_vector = 0;
5646 return copy_insn_1 (insn);
5647}
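
/* Usage sketch (illustrative): unlike copy_rtx, copy_insn renames
   SCRATCHes consistently and shares ASM_OPERANDS vectors, so it is the
   right way to duplicate a pattern for re-emission:

       rtx_insn *new_insn
         = emit_insn_after (copy_insn (PATTERN (old_insn)), after);

   OLD_INSN and AFTER stand for existing insns in the chain.  */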
59ec66dc 5648
8e383849
JR
5649/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5650 on the assumption that INSN itself remains in its original place. */
5651
f8f0516e
DM
5652rtx_insn *
5653copy_delay_slot_insn (rtx_insn *insn)
8e383849
JR
5654{
5655 /* Copy INSN with its rtx_code, all its notes, location etc. */
f8f0516e 5656 insn = as_a <rtx_insn *> (copy_rtx (insn));
8e383849
JR
5657 INSN_UID (insn) = cur_insn_uid++;
5658 return insn;
5659}
5660
23b2ce53
RS
5661/* Initialize data structures and variables in this file
5662 before generating rtl for each function. */
5663
5664void
502b8322 5665init_emit (void)
23b2ce53 5666{
5936d944
JH
5667 set_first_insn (NULL);
5668 set_last_insn (NULL);
b5b8b0ac
AO
5669 if (MIN_NONDEBUG_INSN_UID)
5670 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5671 else
5672 cur_insn_uid = 1;
5673 cur_debug_insn_uid = 1;
23b2ce53 5674 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
23b2ce53 5675 first_label_num = label_num;
614d5bd8 5676 get_current_sequence ()->next = NULL;
23b2ce53 5677
23b2ce53
RS
5678 /* Init the tables that describe all the pseudo regs. */
5679
3e029763 5680 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 5681
3e029763 5682 crtl->emit.regno_pointer_align
1b4572a8 5683 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
86fe05e0 5684
f44986d7
DM
5685 regno_reg_rtx
5686 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
0d4903b8 5687
e50126e8 5688 /* Put copies of all the hard registers into regno_reg_rtx. */
6cde4876 5689 memcpy (regno_reg_rtx,
5fb0e246 5690 initial_regno_reg_rtx,
6cde4876 5691 FIRST_PSEUDO_REGISTER * sizeof (rtx));
e50126e8 5692
23b2ce53 5693 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
bd60bab2 5694 init_virtual_regs ();
740ab4a2
RK
5695
5696 /* Indicate that the virtual registers and stack locations are
5697 all pointers. */
3502dc9c
JDA
5698 REG_POINTER (stack_pointer_rtx) = 1;
5699 REG_POINTER (frame_pointer_rtx) = 1;
5700 REG_POINTER (hard_frame_pointer_rtx) = 1;
5701 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 5702
3502dc9c
JDA
5703 REG_POINTER (virtual_incoming_args_rtx) = 1;
5704 REG_POINTER (virtual_stack_vars_rtx) = 1;
5705 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5706 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5707 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 5708
86fe05e0 5709#ifdef STACK_BOUNDARY
bdb429a5
RK
5710 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5711 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5712 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5713 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5714
5715 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5716 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5717 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5718 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5719 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
5720#endif
5721
5e82e7bd
JVA
5722#ifdef INIT_EXPANDERS
5723 INIT_EXPANDERS;
5724#endif
23b2ce53
RS
5725}
5726
a73b091d 5727/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5728
5729static rtx
ef4bddc2 5730gen_const_vector (machine_mode mode, int constant)
69ef87e2
AH
5731{
5732 rtx tem;
5733 rtvec v;
5734 int units, i;
ef4bddc2 5735 machine_mode inner;
69ef87e2
AH
5736
5737 units = GET_MODE_NUNITS (mode);
5738 inner = GET_MODE_INNER (mode);
5739
15ed7b52
JG
5740 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5741
69ef87e2
AH
5742 v = rtvec_alloc (units);
5743
a73b091d
JW
5744 /* We need to call this function after we set the scalar const_tiny_rtx
5745 entries. */
5746 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5747
5748 for (i = 0; i < units; ++i)
a73b091d 5749 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5750
a06e3c40 5751 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5752 return tem;
5753}
5754
a06e3c40 5755/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
a73b091d 5756 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5757rtx
ef4bddc2 5758gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
a06e3c40 5759{
ef4bddc2 5760 machine_mode inner = GET_MODE_INNER (mode);
a73b091d
JW
5761 int nunits = GET_MODE_NUNITS (mode);
5762 rtx x;
a06e3c40
R
5763 int i;
5764
a73b091d
JW
5765 /* Check to see if all of the elements have the same value. */
5766 x = RTVEC_ELT (v, nunits - 1);
5767 for (i = nunits - 2; i >= 0; i--)
5768 if (RTVEC_ELT (v, i) != x)
5769 break;
5770
5771 /* If the values are all the same, check to see if we can use one of the
5772 standard constant vectors. */
5773 if (i == -1)
5774 {
5775 if (x == CONST0_RTX (inner))
5776 return CONST0_RTX (mode);
5777 else if (x == CONST1_RTX (inner))
5778 return CONST1_RTX (mode);
e7c82a99
JJ
5779 else if (x == CONSTM1_RTX (inner))
5780 return CONSTM1_RTX (mode);
a73b091d
JW
5781 }
5782
5783 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5784}
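
/* Usage sketch (illustrative): because gen_rtx_CONST_VECTOR
   canonicalizes, a vector whose elements are all zero comes back as the
   shared CONST0_RTX of the mode rather than as a fresh rtx:

       rtvec v = rtvec_alloc (GET_MODE_NUNITS (V4SImode));
       for (int j = 0; j < GET_MODE_NUNITS (V4SImode); j++)
         RTVEC_ELT (v, j) = const0_rtx;
       gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                   == CONST0_RTX (V4SImode));

   V4SImode is just an example; it exists only on targets that define
   that vector mode.  */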
5785
b5deb7b6
SL
5786/* Initialize global register information required by all functions. */
5787
5788void
5789init_emit_regs (void)
5790{
5791 int i;
ef4bddc2 5792 machine_mode mode;
1c3f523e 5793 mem_attrs *attrs;
b5deb7b6
SL
5794
5795 /* Reset register attributes */
aebf76a2 5796 reg_attrs_htab->empty ();
b5deb7b6
SL
5797
5798 /* We need reg_raw_mode, so initialize the modes now. */
5799 init_reg_modes_target ();
5800
5801 /* Assign register numbers to the globally defined register rtx. */
b5deb7b6
SL
5802 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5803 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5804 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5805 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5806 virtual_incoming_args_rtx =
5807 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5808 virtual_stack_vars_rtx =
5809 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5810 virtual_stack_dynamic_rtx =
5811 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5812 virtual_outgoing_args_rtx =
5813 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5814 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
32990d5b
JJ
5815 virtual_preferred_stack_boundary_rtx =
5816 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
b5deb7b6
SL
5817
5818 /* Initialize RTL for commonly used hard registers. These are
5819 copied into regno_reg_rtx as we begin to compile each function. */
5820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5fb0e246 5821 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
b5deb7b6
SL
5822
5823#ifdef RETURN_ADDRESS_POINTER_REGNUM
5824 return_address_pointer_rtx
5825 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5826#endif
5827
ca72dad5 5828 pic_offset_table_rtx = NULL_RTX;
b5deb7b6
SL
5829 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5830 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
1c3f523e
RS
5831
5832 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5833 {
ef4bddc2 5834 mode = (machine_mode) i;
766090c2 5835 attrs = ggc_cleared_alloc<mem_attrs> ();
1c3f523e
RS
5836 attrs->align = BITS_PER_UNIT;
5837 attrs->addrspace = ADDR_SPACE_GENERIC;
5838 if (mode != BLKmode)
5839 {
754c3d5d
RS
5840 attrs->size_known_p = true;
5841 attrs->size = GET_MODE_SIZE (mode);
1c3f523e
RS
5842 if (STRICT_ALIGNMENT)
5843 attrs->align = GET_MODE_ALIGNMENT (mode);
5844 }
5845 mode_mem_attrs[i] = attrs;
5846 }
b5deb7b6
SL
5847}
5848
aa3a12d6
RS
5849/* Initialize global machine_mode variables. */
5850
5851void
5852init_derived_machine_modes (void)
5853{
5854 byte_mode = VOIDmode;
5855 word_mode = VOIDmode;
5856
ef4bddc2 5857 for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
aa3a12d6
RS
5858 mode != VOIDmode;
5859 mode = GET_MODE_WIDER_MODE (mode))
5860 {
5861 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5862 && byte_mode == VOIDmode)
5863 byte_mode = mode;
5864
5865 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5866 && word_mode == VOIDmode)
5867 word_mode = mode;
5868 }
5869
5870 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5871}
5872
2d888286 5873/* Create some permanent unique rtl objects shared between all functions. */
23b2ce53
RS
5874
5875void
2d888286 5876init_emit_once (void)
23b2ce53
RS
5877{
5878 int i;
ef4bddc2
RS
5879 machine_mode mode;
5880 machine_mode double_mode;
23b2ce53 5881
807e902e
KZ
5882 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5883 CONST_FIXED, and memory attribute hash tables. */
aebf76a2 5884 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
173b24b9 5885
807e902e 5886#if TARGET_SUPPORTS_WIDE_INT
aebf76a2 5887 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
807e902e 5888#endif
aebf76a2 5889 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5692c7bc 5890
aebf76a2 5891 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
091a3ac7 5892
aebf76a2 5893 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
67673f5c 5894
5da077de 5895#ifdef INIT_EXPANDERS
414c4dc4
NC
5896 /* This is to initialize {init|mark|free}_machine_status before the first
5897 call to push_function_context_to. This is needed by the Chill front
a1f300c0 5898 end which calls push_function_context_to before the first call to
5da077de
AS
5899 init_function_start. */
5900 INIT_EXPANDERS;
5901#endif
5902
23b2ce53
RS
5903 /* Create the unique rtx's for certain rtx codes and operand values. */
5904
ecf835e9
KN
5905 /* Process stack-limiting command-line options. */
5906 if (opt_fstack_limit_symbol_arg != NULL)
5907 stack_limit_rtx
5908 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
5909 if (opt_fstack_limit_register_no >= 0)
5910 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
5911
a2a8cc44 5912 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
c5c76735 5913 tries to use these variables. */
23b2ce53 5914 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
750c9258 5915 const_int_rtx[i + MAX_SAVED_CONST_INT] =
f1b690f1 5916 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
23b2ce53 5917
68d75312
JC
5918 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5919 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5da077de 5920 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
68d75312 5921 else
3b80f6ca 5922 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
23b2ce53 5923
aa3a12d6
RS
5924 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5925
807e902e
KZ
5926 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5927 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5928 real_from_integer (&dconst2, double_mode, 2, SIGNED);
aefa9d43
KG
5929
5930 dconstm1 = dconst1;
5931 dconstm1.sign = 1;
03f2ea93
RS
5932
5933 dconsthalf = dconst1;
1e92bbb9 5934 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
23b2ce53 5935
e7c82a99 5936 for (i = 0; i < 3; i++)
23b2ce53 5937 {
aefa9d43 5938 const REAL_VALUE_TYPE *const r =
b216cd4a
ZW
5939 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5940
15ed7b52
JG
5941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5942 mode != VOIDmode;
5943 mode = GET_MODE_WIDER_MODE (mode))
5944 const_tiny_rtx[i][(int) mode] =
555affd7 5945 const_double_from_real_value (*r, mode);
15ed7b52
JG
5946
5947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5948 mode != VOIDmode;
23b2ce53 5949 mode = GET_MODE_WIDER_MODE (mode))
5692c7bc 5950 const_tiny_rtx[i][(int) mode] =
555affd7 5951 const_double_from_real_value (*r, mode);
23b2ce53 5952
906c4e36 5953 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
23b2ce53 5954
15ed7b52
JG
5955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5956 mode != VOIDmode;
23b2ce53 5957 mode = GET_MODE_WIDER_MODE (mode))
906c4e36 5958 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
33d3e559 5959
ede6c734
MS
5960 for (mode = MIN_MODE_PARTIAL_INT;
5961 mode <= MAX_MODE_PARTIAL_INT;
ef4bddc2 5962 mode = (machine_mode)((int)(mode) + 1))
33d3e559 5963 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
23b2ce53
RS
5964 }
5965
e7c82a99
JJ
5966 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5967
5968 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5969 mode != VOIDmode;
5970 mode = GET_MODE_WIDER_MODE (mode))
5971 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5972
ede6c734
MS
5973 for (mode = MIN_MODE_PARTIAL_INT;
5974 mode <= MAX_MODE_PARTIAL_INT;
ef4bddc2 5975 mode = (machine_mode)((int)(mode) + 1))
c8a89d2a
BS
5976 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5977
e90721b1
AP
5978 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5979 mode != VOIDmode;
5980 mode = GET_MODE_WIDER_MODE (mode))
5981 {
5982 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5983 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5984 }
5985
5986 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5987 mode != VOIDmode;
5988 mode = GET_MODE_WIDER_MODE (mode))
5989 {
5990 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5991 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5992 }
5993
69ef87e2
AH
5994 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5995 mode != VOIDmode;
5996 mode = GET_MODE_WIDER_MODE (mode))
a73b091d
JW
5997 {
5998 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5999 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
e7c82a99 6000 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
a73b091d 6001 }
69ef87e2
AH
6002
6003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6004 mode != VOIDmode;
6005 mode = GET_MODE_WIDER_MODE (mode))
a73b091d
JW
6006 {
6007 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6008 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6009 }
69ef87e2 6010
325217ed
CF
6011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6012 mode != VOIDmode;
6013 mode = GET_MODE_WIDER_MODE (mode))
6014 {
c3284718
RS
6015 FCONST0 (mode).data.high = 0;
6016 FCONST0 (mode).data.low = 0;
6017 FCONST0 (mode).mode = mode;
091a3ac7
CF
6018 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6019 FCONST0 (mode), mode);
325217ed
CF
6020 }
6021
6022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6023 mode != VOIDmode;
6024 mode = GET_MODE_WIDER_MODE (mode))
6025 {
c3284718
RS
6026 FCONST0 (mode).data.high = 0;
6027 FCONST0 (mode).data.low = 0;
6028 FCONST0 (mode).mode = mode;
091a3ac7
CF
6029 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6030 FCONST0 (mode), mode);
325217ed
CF
6031 }
6032
6033 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6034 mode != VOIDmode;
6035 mode = GET_MODE_WIDER_MODE (mode))
6036 {
c3284718
RS
6037 FCONST0 (mode).data.high = 0;
6038 FCONST0 (mode).data.low = 0;
6039 FCONST0 (mode).mode = mode;
091a3ac7
CF
6040 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6041 FCONST0 (mode), mode);
325217ed
CF
6042
6043 /* We store the value 1. */
c3284718
RS
6044 FCONST1 (mode).data.high = 0;
6045 FCONST1 (mode).data.low = 0;
6046 FCONST1 (mode).mode = mode;
6047 FCONST1 (mode).data
9be0ac8c
LC
6048 = double_int_one.lshift (GET_MODE_FBIT (mode),
6049 HOST_BITS_PER_DOUBLE_INT,
6050 SIGNED_FIXED_POINT_MODE_P (mode));
091a3ac7
CF
6051 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6052 FCONST1 (mode), mode);
325217ed
CF
6053 }
6054
6055 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6056 mode != VOIDmode;
6057 mode = GET_MODE_WIDER_MODE (mode))
6058 {
c3284718
RS
6059 FCONST0 (mode).data.high = 0;
6060 FCONST0 (mode).data.low = 0;
6061 FCONST0 (mode).mode = mode;
091a3ac7
CF
6062 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6063 FCONST0 (mode), mode);
325217ed
CF
6064
6065 /* We store the value 1. */
c3284718
RS
6066 FCONST1 (mode).data.high = 0;
6067 FCONST1 (mode).data.low = 0;
6068 FCONST1 (mode).mode = mode;
6069 FCONST1 (mode).data
9be0ac8c
LC
6070 = double_int_one.lshift (GET_MODE_FBIT (mode),
6071 HOST_BITS_PER_DOUBLE_INT,
6072 SIGNED_FIXED_POINT_MODE_P (mode));
091a3ac7
CF
6073 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6074 FCONST1 (mode), mode);
6075 }
6076
6077 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6078 mode != VOIDmode;
6079 mode = GET_MODE_WIDER_MODE (mode))
6080 {
6081 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6082 }
6083
6084 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6085 mode != VOIDmode;
6086 mode = GET_MODE_WIDER_MODE (mode))
6087 {
6088 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6089 }
6090
6091 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6092 mode != VOIDmode;
6093 mode = GET_MODE_WIDER_MODE (mode))
6094 {
6095 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6096 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6097 }
6098
6099 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6100 mode != VOIDmode;
6101 mode = GET_MODE_WIDER_MODE (mode))
6102 {
6103 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6104 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
325217ed
CF
6105 }
6106
dbbbbf3b 6107 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
ef4bddc2 6108 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
dbbbbf3b 6109 const_tiny_rtx[0][i] = const0_rtx;
23b2ce53 6110
f0417c82
RH
6111 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6112 if (STORE_FLAG_VALUE == 1)
6113 const_tiny_rtx[1][(int) BImode] = const1_rtx;
ca4adc91 6114
d5e254e1
IE
6115 for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6116 mode != VOIDmode;
6117 mode = GET_MODE_WIDER_MODE (mode))
6118 {
6119 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6120 const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6121 }
6122
ca4adc91
RS
6123 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6124 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6125 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6126 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
1476d1bd
MM
6127 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6128 /*prev_insn=*/NULL,
6129 /*next_insn=*/NULL,
6130 /*bb=*/NULL,
6131 /*pattern=*/NULL_RTX,
6132 /*location=*/-1,
6133 CODE_FOR_nothing,
6134 /*reg_notes=*/NULL_RTX);
23b2ce53 6135}
a11759a3 6136\f
969d70ca
JH
6137/* Produce an exact duplicate of insn INSN after AFTER.
6138 Take care to update libcall regions if present. */
6139
cd459bf8 6140rtx_insn *
a1950df3 6141emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
969d70ca 6142{
cd459bf8
DM
6143 rtx_insn *new_rtx;
6144 rtx link;
969d70ca
JH
6145
6146 switch (GET_CODE (insn))
6147 {
6148 case INSN:
60564289 6149 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca
JH
6150 break;
6151
6152 case JUMP_INSN:
60564289 6153 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
ec27069c 6154 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
969d70ca
JH
6155 break;
6156
b5b8b0ac
AO
6157 case DEBUG_INSN:
6158 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6159 break;
6160
969d70ca 6161 case CALL_INSN:
60564289 6162 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca 6163 if (CALL_INSN_FUNCTION_USAGE (insn))
60564289 6164 CALL_INSN_FUNCTION_USAGE (new_rtx)
969d70ca 6165 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
60564289
KG
6166 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6167 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6168 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
b8698a0f 6169 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
becfd6e5 6170 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
969d70ca
JH
6171 break;
6172
6173 default:
5b0264cb 6174 gcc_unreachable ();
969d70ca
JH
6175 }
6176
6177 /* Update LABEL_NUSES. */
60564289 6178 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
969d70ca 6179
5368224f 6180 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ba4f7968 6181
0a3d71f5
JW
6182 /* If the old insn is frame related, then so is the new one. This is
6183 primarily needed for IA-64 unwind info which marks epilogue insns,
6184 which may be duplicated by the basic block reordering code. */
60564289 6185 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
0a3d71f5 6186
1581a12c
BS
6187 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6188 rtx *ptail = &REG_NOTES (new_rtx);
6189 while (*ptail != NULL_RTX)
6190 ptail = &XEXP (*ptail, 1);
6191
cf7c4aa6
HPN
6192 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6193 will make them. REG_LABEL_TARGETs are created there too, but are
6194 supposed to be sticky, so we copy them. */
969d70ca 6195 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cf7c4aa6 6196 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
969d70ca 6197 {
1581a12c
BS
6198 *ptail = duplicate_reg_note (link);
6199 ptail = &XEXP (*ptail, 1);
969d70ca
JH
6200 }
6201
60564289
KG
6202 INSN_CODE (new_rtx) = INSN_CODE (insn);
6203 return new_rtx;
969d70ca 6204}
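
/* Usage sketch (illustrative): a pass that needs a second copy of an
   existing insn, with notes and flags preserved, duplicates it with the
   function above instead of hand-copying:

       rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   INSN and AFTER stand for existing insns in the chain.  */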
e2500fed 6205
1431042e 6206static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d 6207rtx
ef4bddc2 6208gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
3e89ed8d
JH
6209{
6210 if (hard_reg_clobbers[mode][regno])
6211 return hard_reg_clobbers[mode][regno];
6212 else
6213 return (hard_reg_clobbers[mode][regno] =
6214 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6215}
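
/* Usage sketch (illustrative): hard-register clobbers are shared, so a
   target expander building a PARALLEL can reuse them cheaply:

       rtx par = gen_rtx_PARALLEL
         (VOIDmode,
          gen_rtvec (2, gen_rtx_SET (dest, src),
                     gen_hard_reg_clobber (CCmode, FLAGS_REG)));

   FLAGS_REG names a target-specific hard register (the i386 flags
   register here); DEST and SRC are placeholder rtxes.  */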
6216
5368224f
DC
6217location_t prologue_location;
6218location_t epilogue_location;
78bde837
SB
6219
6220/* Hold the current and the last location information, so that the
6221 data structures are built lazily only when some instructions at a
6222 given place are needed. */
3a50da34 6223static location_t curr_location;
78bde837 6224
5368224f 6225/* Allocate insn location datastructure. */
78bde837 6226void
5368224f 6227insn_locations_init (void)
78bde837 6228{
5368224f 6229 prologue_location = epilogue_location = 0;
78bde837 6230 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6231}
6232
6233/* At the end of emit stage, clear current location. */
6234void
5368224f 6235insn_locations_finalize (void)
78bde837 6236{
5368224f
DC
6237 epilogue_location = curr_location;
6238 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6239}
6240
6241/* Set current location. */
6242void
5368224f 6243set_curr_insn_location (location_t location)
78bde837 6244{
78bde837
SB
6245 curr_location = location;
6246}
6247
6248/* Get current location. */
6249location_t
5368224f 6250curr_insn_location (void)
78bde837
SB
6251{
6252 return curr_location;
6253}
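
/* Usage sketch (illustrative): expansion sets the current location from
   the statement being expanded, and make_insn_raw stamps every insn
   emitted afterwards with it:

       set_curr_insn_location (gimple_location (stmt));
       rtx_insn *insn = emit_move_insn (dest, src);
       gcc_assert (INSN_LOCATION (insn) == curr_insn_location ());

   STMT, DEST and SRC are placeholders supplied by the caller.  */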
6254
78bde837
SB
6255/* Return lexical scope block insn belongs to. */
6256tree
a1950df3 6257insn_scope (const rtx_insn *insn)
78bde837 6258{
5368224f 6259 return LOCATION_BLOCK (INSN_LOCATION (insn));
78bde837
SB
6260}
6261
6262/* Return line number of the statement that produced this insn. */
6263int
a1950df3 6264insn_line (const rtx_insn *insn)
78bde837 6265{
5368224f 6266 return LOCATION_LINE (INSN_LOCATION (insn));
78bde837
SB
6267}
6268
6269/* Return source file of the statement that produced this insn. */
6270const char *
a1950df3 6271insn_file (const rtx_insn *insn)
78bde837 6272{
5368224f 6273 return LOCATION_FILE (INSN_LOCATION (insn));
78bde837 6274}
8930883e 6275
ffa4602f
EB
6276/* Return expanded location of the statement that produced this insn. */
6277expanded_location
a1950df3 6278insn_location (const rtx_insn *insn)
ffa4602f
EB
6279{
6280 return expand_location (INSN_LOCATION (insn));
6281}
6282
8930883e
MK
6283/* Return true if memory model MODEL requires a pre-operation (release-style)
6284 barrier or a post-operation (acquire-style) barrier. While not universal,
6285 this function matches the behavior of several targets. */
6286
6287bool
6288need_atomic_barrier_p (enum memmodel model, bool pre)
6289{
40ad260d 6290 switch (model & MEMMODEL_BASE_MASK)
8930883e
MK
6291 {
6292 case MEMMODEL_RELAXED:
6293 case MEMMODEL_CONSUME:
6294 return false;
6295 case MEMMODEL_RELEASE:
6296 return pre;
6297 case MEMMODEL_ACQUIRE:
6298 return !pre;
6299 case MEMMODEL_ACQ_REL:
6300 case MEMMODEL_SEQ_CST:
6301 return true;
6302 default:
6303 gcc_unreachable ();
6304 }
6305}
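
/* Usage sketch (illustrative): a target expander for an atomic
   operation can bracket it with fences as the model requires:

       if (need_atomic_barrier_p (model, true))
         emit_insn (gen_memory_barrier ());
       ... emit the atomic operation ...
       if (need_atomic_barrier_p (model, false))
         emit_insn (gen_memory_barrier ());

   gen_memory_barrier stands for a target's memory_barrier insn
   pattern, which not every target provides.  */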
8194c537
DM
6306
6307/* Initialize fields of rtl_data related to stack alignment. */
6308
6309void
6310rtl_data::init_stack_alignment ()
6311{
6312 stack_alignment_needed = STACK_BOUNDARY;
6313 max_used_stack_slot_alignment = STACK_BOUNDARY;
6314 stack_alignment_estimated = 0;
6315 preferred_stack_boundary = STACK_BOUNDARY;
6316}
6317
8930883e 6318\f
e2500fed 6319#include "gt-emit-rtl.h"