/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "params.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not
   able to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
                        ? hard_regno_nregs[regno][mode]
                        : 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
                                                   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

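/* For example, gen_int_mode (0xff, QImode) yields (const_int -1):
   trunc_int_for_mode sign-extends 0xff from QImode's 8-bit precision,
   and gen_rtx_CONST_INT then returns the shared constm1_rtx from
   const_int_rtx[] rather than allocating a fresh rtx.  */
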
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

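/* For example, const_double_from_real_value (dconst1, DFmode) returns
   the interned (const_double:DF 1.0), so repeated requests for the
   same value in the same mode share a single rtx.  */
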
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

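/* For example, with a 64-bit HOST_WIDE_INT, a 128-bit constant such as
   wi::mask (96, false, 128) in TImode does not fit in a CONST_INT, so
   the code above packs its two 64-bit blocks into a CONST_WIDE_INT
   (or hands them to immed_double_const when the target does not
   support wide ints).  */
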
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
                  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

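/* For example, immed_double_const (0, 1, VOIDmode) on a host with
   64-bit HOST_WIDE_INT denotes 2**64: I1 is neither zero nor a sign
   copy of I0, so case 3 above applies and the pair is interned via
   lookup_const_double.  */
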
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
          && regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

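/* For example, before reload gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)
   returns the shared frame_pointer_rtx, so all explicit frame pointer
   references compare pointer-equal.  */
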
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

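/* For example, a load from the constant pool would typically be built
   with gen_const_mem: the resulting MEM is marked MEM_READONLY_P and
   MEM_NOTRAP_P, which lets later passes CSE or hoist it freely.  */
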
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be less than the
                integer mode.  LRA also uses subregs for a register that
                should be used in a different mode in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

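/* To illustrate the rules above: on a 32-bit target,
   (subreg:SI (reg:DI) 4) is valid (an aligned subword that is the
   lowpart of the second word), while (subreg:SI (reg:DF) 0) is
   rejected because a float-mode inner register must not change size,
   except under the LRA exception.  */
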
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

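/* For example, gen_rtvec (2, set, clobber) builds the two-element
   vector that gen_rtx_PARALLEL would wrap as
   (parallel [(set ...) (clobber ...)]), where SET and CLOBBER are
   previously constructed rtx values.  */
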
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
                     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
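
/* For example, byte_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a big-endian one; for the paradoxical
   case, byte_lowpart_offset (DImode, SImode) is 0 and -4
   respectively.  */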
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

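/* For example, with generating_concat_p set, gen_reg_rtx (DCmode)
   yields (concat:DC (reg:DF) (reg:DF)) built from two fresh pseudos
   rather than a single DCmode pseudo.  */
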
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
           || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
          && !targetm.have_ptr_extend ())
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_P (tdecl)
                                               ? DECL_MODE (tdecl)
                                               : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
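
/* For example, gen_lowpart_common (SImode, (sign_extend:DI (reg:SI)))
   simply returns the inner (reg:SI): the low-order bits of the
   extension are the unextended value itself.  */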
\f
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

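/* For example, subreg_lowpart_offset (QImode, SImode) is 0 on a
   little-endian target and 3 on a big-endian one: the least
   significant byte sits at opposite ends of the word.  */
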
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

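/* Conversely, subreg_highpart_offset (QImode, SImode) is 3 on a
   little-endian target and 0 on a big-endian one.  */
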
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
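
/* For example, (subreg:DI (reg:SI) 0) is paradoxical: the outer mode
   is wider than the inner register, and the extra bits are in general
   undefined.  */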
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
                 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
1637\f
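/* Illustrative sketch only, not part of the original file: a typical
   caller splits a multi-word value into word_mode pieces, falling back
   to operand_subword_force when the plain lookup cannot produce the
   word.  The DImode operand and word index 0 are assumptions made for
   the example.  */

static rtx ATTRIBUTE_UNUSED
example_low_word (rtx op)
{
  /* Try the cheap lookup first; it may return 0 for awkward operands.  */
  rtx lo = operand_subword (op, 0, 1, DImode);
  if (lo == 0)
    /* Copy OP somewhere word access is safe and retry; this must succeed.  */
    lo = operand_subword_force (op, 0, DImode);
  return lo;
}
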
/* Return 1 if the MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs; even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

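/* Illustrative sketch only, not part of the original file: querying
   whether a MEM is known to be 32-bit aligned via the helper above.
   A result of 0 means XEXP (mem, 0) itself is so aligned; -1 means
   nothing is known.  The 32-bit alignment is an assumption made for
   the example.  */

static bool ATTRIBUTE_UNUSED
example_mem_32bit_aligned_p (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}
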
/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

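/* Illustrative sketch only, not part of the original file: a typical
   caller allocates storage for a local variable and then stamps the MEM
   with the attributes of its decl.  The use of assign_stack_local here
   is an assumption made for the example.  */

static rtx ATTRIBUTE_UNUSED
example_stack_mem_for_var (tree decl)
{
  machine_mode mode = DECL_MODE (decl);
  rtx mem = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
  /* Record type, alias set, size and alignment from the decl.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
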
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
\f
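/* Illustrative sketch only, not part of the original file: building a
   fresh MEM by hand and stamping it with the setters above.  The SImode
   size and 32-bit alignment are assumptions made for the example.  */

static rtx ATTRIBUTE_UNUSED
example_annotated_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (SImode, addr);
  set_mem_align (mem, 32);              /* Alignment is in bits.  */
  set_mem_size (mem, 4);                /* Size is in bytes.  */
  set_mem_alias_set (mem, new_alias_set ());
  return mem;
}
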
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate the address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}

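/* Illustrative sketch only, not part of the original file: most callers
   reach adjust_address_1 through the adjust_address macro from expr.h.
   Here we take the second SImode word of a DImode MEM; the 4-byte offset
   assumes a 32-bit-word target.  */

static rtx ATTRIBUTE_UNUSED
example_high_word_mem (rtx mem)
{
  /* Mode change DImode -> SImode plus a byte offset of 4, with the
     address validated and MEM_ATTRS updated to match.  */
  return adjust_address (mem, SImode, 4);
}
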
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
\f
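/* Illustrative sketch only, not part of the original file: a backend
   that can only do SImode loads might widen a byte access like this.
   The modes are assumptions made for the example; masking out the
   wanted byte is left to the caller.  */

static rtx ATTRIBUTE_UNUSED
example_widen_qi_to_si (rtx byte_mem)
{
  /* The result is an SImode MEM at the same address, with its size set
     to 4 and its alias set conservatively cleared.  */
  return widen_memory_access (byte_mem, SImode, 0);
}
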
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
\f
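/* Illustrative sketch only, not part of the original file: the usual
   pattern in an expander is to create a label, branch to it, and bind
   it at the join point.  The unconditional emit_jump is an assumption
   made to keep the example minimal.  */

static void ATTRIBUTE_UNUSED
example_forward_jump (void)
{
  rtx_code_label *target = gen_label_rtx ();
  emit_jump (target);           /* Branch forward over the skipped code.  */
  /* ... skipped insns would be emitted here ...  */
  emit_label (target);          /* Bind the label at the join point.  */
}
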
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

2665
ef330312 2666
2c07f13b
JH
2667/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2668 Recursively does the same for subexpressions. */
2669
2670static void
2671verify_rtx_sharing (rtx orig, rtx insn)
2672{
2673 rtx x = orig;
2674 int i;
2675 enum rtx_code code;
2676 const char *format_ptr;
2677
2678 if (x == 0)
2679 return;
2680
2681 code = GET_CODE (x);
2682
2683 /* These types may be freely shared. */
2684
2685 switch (code)
2686 {
2687 case REG:
0ca5af51
AO
2688 case DEBUG_EXPR:
2689 case VALUE:
d8116890 2690 CASE_CONST_ANY:
2c07f13b
JH
2691 case SYMBOL_REF:
2692 case LABEL_REF:
2693 case CODE_LABEL:
2694 case PC:
2695 case CC0:
3810076b 2696 case RETURN:
26898771 2697 case SIMPLE_RETURN:
2c07f13b 2698 case SCRATCH:
3e89ed8d 2699 /* SCRATCH must be shared because they represent distinct values. */
c5c5ba89 2700 return;
3e89ed8d 2701 case CLOBBER:
c5c5ba89
JH
2702 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2703 clobbers or clobbers of hard registers that originated as pseudos.
2704 This is needed to allow safe register renaming. */
2705 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2706 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2707 return;
2708 break;
2c07f13b
JH
2709
2710 case CONST:
6fb5fa3c 2711 if (shared_const_p (orig))
2c07f13b
JH
2712 return;
2713 break;
2714
2715 case MEM:
2716 /* A MEM is allowed to be shared if its address is constant. */
2717 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2718 || reload_completed || reload_in_progress)
2719 return;
2720
2721 break;
2722
2723 default:
2724 break;
2725 }
2726
2727 /* This rtx may not be shared. If it has already been seen,
2728 replace it with a copy of itself. */
b2b29377 2729 if (flag_checking && RTX_FLAG (x, used))
2c07f13b 2730 {
ab532386 2731 error ("invalid rtl sharing found in the insn");
2c07f13b 2732 debug_rtx (insn);
ab532386 2733 error ("shared rtx");
2c07f13b 2734 debug_rtx (x);
ab532386 2735 internal_error ("internal consistency failure");
2c07f13b 2736 }
1a2caa7a 2737 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2738
2c07f13b
JH
2739 RTX_FLAG (x, used) = 1;
2740
6614fd40 2741 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2742
2743 format_ptr = GET_RTX_FORMAT (code);
2744
2745 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2746 {
2747 switch (*format_ptr++)
2748 {
2749 case 'e':
2750 verify_rtx_sharing (XEXP (x, i), insn);
2751 break;
2752
2753 case 'E':
2754 if (XVEC (x, i) != NULL)
2755 {
2756 int j;
2757 int len = XVECLEN (x, i);
2758
2759 for (j = 0; j < len; j++)
2760 {
1a2caa7a
NS
2761 /* We allow sharing of ASM_OPERANDS inside single
2762 instruction. */
2c07f13b 2763 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2764 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2765 == ASM_OPERANDS))
2c07f13b
JH
2766 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2767 else
2768 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2769 }
2770 }
2771 break;
2772 }
2773 }
2774 return;
2775}
2776
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

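/* Illustrative sketch only, not part of the original file: a pass that
   has just rewritten insn patterns can defend against accidental rtl
   sharing by invoking the verifier under flag_checking.  */

static void ATTRIBUTE_UNUSED
example_check_sharing (void)
{
  /* Free in release builds; full O(insns) verification when checking.  */
  if (flag_checking)
    verify_rtl_sharing ();
}
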
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

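/* Illustrative sketch only, not part of the original file: unsharing
   the body of one insn in isolation.  The used bits must be cleared
   first so that copy_rtx_if_shared only copies nodes it meets twice.  */

static void ATTRIBUTE_UNUSED
example_unshare_one_insn (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
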
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length-1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
\f
/* Emission of insns (adding them to the doubly-linked list).  */

3169/* Emission of insns (adding them to the doubly-linked list). */
3170
23b2ce53
RS
3171/* Return the last insn emitted, even if it is in a sequence now pushed. */
3172
db76cf1e 3173rtx_insn *
502b8322 3174get_last_insn_anywhere (void)
23b2ce53 3175{
614d5bd8
AM
3176 struct sequence_stack *seq;
3177 for (seq = get_current_sequence (); seq; seq = seq->next)
3178 if (seq->last != 0)
3179 return seq->last;
23b2ce53
RS
3180 return 0;
3181}
3182
2a496e8b
JDA
3183/* Return the first nonnote insn emitted in current sequence or current
3184 function. This routine looks inside SEQUENCEs. */
3185
e4685bc8 3186rtx_insn *
502b8322 3187get_first_nonnote_insn (void)
2a496e8b 3188{
dc01c3d1 3189 rtx_insn *insn = get_insns ();
91373fe8
JDA
3190
3191 if (insn)
3192 {
3193 if (NOTE_P (insn))
3194 for (insn = next_insn (insn);
3195 insn && NOTE_P (insn);
3196 insn = next_insn (insn))
3197 continue;
3198 else
3199 {
2ca202e7 3200 if (NONJUMP_INSN_P (insn)
91373fe8 3201 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3202 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
91373fe8
JDA
3203 }
3204 }
2a496e8b
JDA
3205
3206 return insn;
3207}
3208
3209/* Return the last nonnote insn emitted in current sequence or current
3210 function. This routine looks inside SEQUENCEs. */
3211
e4685bc8 3212rtx_insn *
502b8322 3213get_last_nonnote_insn (void)
2a496e8b 3214{
dc01c3d1 3215 rtx_insn *insn = get_last_insn ();
91373fe8
JDA
3216
3217 if (insn)
3218 {
3219 if (NOTE_P (insn))
3220 for (insn = previous_insn (insn);
3221 insn && NOTE_P (insn);
3222 insn = previous_insn (insn))
3223 continue;
3224 else
3225 {
dc01c3d1
DM
3226 if (NONJUMP_INSN_P (insn))
3227 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3228 insn = seq->insn (seq->len () - 1);
91373fe8
JDA
3229 }
3230 }
2a496e8b
JDA
3231
3232 return insn;
3233}
3234
b5b8b0ac
AO
3235/* Return the number of actual (non-debug) insns emitted in this
3236 function. */
3237
3238int
3239get_max_insn_count (void)
3240{
3241 int n = cur_insn_uid;
3242
3243 /* The table size must be stable across -g, to avoid codegen
3244 differences due to debug insns, and not be affected by
3245 -fmin-insn-uid, to avoid excessive table size and to simplify
3246 debugging of -fcompare-debug failures. */
3247 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3248 n -= cur_debug_insn_uid;
3249 else
3250 n -= MIN_NONDEBUG_INSN_UID;
3251
3252 return n;
3253}
3254
23b2ce53
RS
3255\f
3256/* Return the next insn. If it is a SEQUENCE, return the first insn
3257 of the sequence. */
3258
eb51c837 3259rtx_insn *
4ce524a1 3260next_insn (rtx_insn *insn)
23b2ce53 3261{
75547801
KG
3262 if (insn)
3263 {
3264 insn = NEXT_INSN (insn);
3265 if (insn && NONJUMP_INSN_P (insn)
3266 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3267 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3268 }
23b2ce53 3269
dc01c3d1 3270 return insn;
23b2ce53
RS
3271}
3272
3273/* Return the previous insn. If it is a SEQUENCE, return the last insn
3274 of the sequence. */
3275
eb51c837 3276rtx_insn *
4ce524a1 3277previous_insn (rtx_insn *insn)
23b2ce53 3278{
75547801
KG
3279 if (insn)
3280 {
3281 insn = PREV_INSN (insn);
dc01c3d1
DM
3282 if (insn && NONJUMP_INSN_P (insn))
3283 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3284 insn = seq->insn (seq->len () - 1);
75547801 3285 }
23b2ce53 3286
dc01c3d1 3287 return insn;
23b2ce53
RS
3288}
3289
3290/* Return the next insn after INSN that is not a NOTE. This routine does not
3291 look inside SEQUENCEs. */
3292
eb51c837 3293rtx_insn *
dc01c3d1 3294next_nonnote_insn (rtx uncast_insn)
23b2ce53 3295{
dc01c3d1 3296 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
75547801
KG
3297 while (insn)
3298 {
3299 insn = NEXT_INSN (insn);
3300 if (insn == 0 || !NOTE_P (insn))
3301 break;
3302 }
23b2ce53 3303
dc01c3d1 3304 return insn;
23b2ce53
RS
3305}
3306
1e211590
DD
3307/* Return the next insn after INSN that is not a NOTE, but stop the
3308 search before we enter another basic block. This routine does not
3309 look inside SEQUENCEs. */
3310
eb51c837 3311rtx_insn *
e4685bc8 3312next_nonnote_insn_bb (rtx_insn *insn)
1e211590
DD
3313{
3314 while (insn)
3315 {
3316 insn = NEXT_INSN (insn);
3317 if (insn == 0 || !NOTE_P (insn))
3318 break;
3319 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3320 return NULL;
1e211590
DD
3321 }
3322
dc01c3d1 3323 return insn;
1e211590
DD
3324}
3325
23b2ce53
RS
3326/* Return the previous insn before INSN that is not a NOTE. This routine does
3327 not look inside SEQUENCEs. */
3328
eb51c837 3329rtx_insn *
dc01c3d1 3330prev_nonnote_insn (rtx uncast_insn)
23b2ce53 3331{
dc01c3d1
DM
3332 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3333
75547801
KG
3334 while (insn)
3335 {
3336 insn = PREV_INSN (insn);
3337 if (insn == 0 || !NOTE_P (insn))
3338 break;
3339 }
23b2ce53 3340
dc01c3d1 3341 return insn;
23b2ce53
RS
3342}
3343
896aa4ea
DD
3344/* Return the previous insn before INSN that is not a NOTE, but stop
3345 the search before we enter another basic block. This routine does
3346 not look inside SEQUENCEs. */
3347
eb51c837 3348rtx_insn *
dc01c3d1 3349prev_nonnote_insn_bb (rtx uncast_insn)
896aa4ea 3350{
dc01c3d1
DM
3351 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3352
896aa4ea
DD
3353 while (insn)
3354 {
3355 insn = PREV_INSN (insn);
3356 if (insn == 0 || !NOTE_P (insn))
3357 break;
3358 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3359 return NULL;
896aa4ea
DD
3360 }
3361
dc01c3d1 3362 return insn;
896aa4ea
DD
3363}
3364
b5b8b0ac
AO
3365/* Return the next insn after INSN that is not a DEBUG_INSN. This
3366 routine does not look inside SEQUENCEs. */
3367
eb51c837 3368rtx_insn *
dc01c3d1 3369next_nondebug_insn (rtx uncast_insn)
b5b8b0ac 3370{
dc01c3d1
DM
3371 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3372
b5b8b0ac
AO
3373 while (insn)
3374 {
3375 insn = NEXT_INSN (insn);
3376 if (insn == 0 || !DEBUG_INSN_P (insn))
3377 break;
3378 }
3379
dc01c3d1 3380 return insn;
b5b8b0ac
AO
3381}
3382
3383/* Return the previous insn before INSN that is not a DEBUG_INSN.
3384 This routine does not look inside SEQUENCEs. */
3385
eb51c837 3386rtx_insn *
dc01c3d1 3387prev_nondebug_insn (rtx uncast_insn)
b5b8b0ac 3388{
dc01c3d1
DM
3389 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3390
b5b8b0ac
AO
3391 while (insn)
3392 {
3393 insn = PREV_INSN (insn);
3394 if (insn == 0 || !DEBUG_INSN_P (insn))
3395 break;
3396 }
3397
dc01c3d1 3398 return insn;
b5b8b0ac
AO
3399}
3400
f0fc0803
JJ
3401/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3402 This routine does not look inside SEQUENCEs. */
3403
eb51c837 3404rtx_insn *
dc01c3d1 3405next_nonnote_nondebug_insn (rtx uncast_insn)
f0fc0803 3406{
dc01c3d1
DM
3407 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3408
f0fc0803
JJ
3409 while (insn)
3410 {
3411 insn = NEXT_INSN (insn);
3412 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3413 break;
3414 }
3415
dc01c3d1 3416 return insn;
f0fc0803
JJ
3417}
3418
3419/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3420 This routine does not look inside SEQUENCEs. */
3421
eb51c837 3422rtx_insn *
dc01c3d1 3423prev_nonnote_nondebug_insn (rtx uncast_insn)
f0fc0803 3424{
dc01c3d1
DM
3425 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3426
f0fc0803
JJ
3427 while (insn)
3428 {
3429 insn = PREV_INSN (insn);
3430 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3431 break;
3432 }
3433
dc01c3d1 3434 return insn;
f0fc0803
JJ
3435}
3436
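/* A minimal usage sketch of the scanning helpers above; "process" is a
   hypothetical caller function, not something defined in this file:

     for (rtx_insn *insn = next_nonnote_nondebug_insn (start);
	  insn != NULL;
	  insn = next_nonnote_nondebug_insn (insn))
       process (insn);

   visits every insn after START that is neither a NOTE nor a
   DEBUG_INSN.  The _bb variants additionally return NULL at the first
   NOTE_INSN_BASIC_BLOCK, confining such walks to one basic block.  */
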
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Return nonzero if INSN really does something.  After reload,
   standalone USE and CLOBBER insns do not count as doing something.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
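
/* Concretely: after reload a plain move such as (set (reg:SI 0)
   (reg:SI 1)) is active, while a standalone (use ...) or (clobber ...)
   pattern is not, so next_active_insn and prev_active_insn step over
   the latter two.  Before reload, USE and CLOBBER insns still count
   as active.  */
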
\f
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

/* Return true if X contains a RTX_AUTOINC class rtx whose operand
   matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}

/* Increment the label uses for all labels present in X.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_int_reg_note (insn, REG_BR_PROB, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx_insn *next;
	    rtx *p;

	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* If the new call is the last instruction in the sequence,
	       it will effectively replace the old call in-situ.  Otherwise
	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
	       so that it comes immediately after the new call.  */
	    if (NEXT_INSN (insn))
	      for (next = NEXT_INSN (trial);
		   next && NOTE_P (next);
		   next = NEXT_INSN (next))
		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		  {
		    remove_insn (next);
		    add_insn_after (next, insn, NULL);
		    break;
		  }
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
	 ? (after ? PREV_INSN (after) : get_last_insn ())
	 : NEXT_INSN (before);
}
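
/* A sketch of how a pass typically invokes the splitter (INSN stands
   for whatever insn the caller wants split; nothing else is assumed):

     rtx_insn *first = try_split (PATTERN (insn), insn, 0);

   If no machine-description split matches, INSN is returned unchanged;
   otherwise INSN has been deleted and FIRST points at the first insn
   of the replacement sequence, already spliced into the chain at the
   same location.  */
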
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
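
/* The SEQUENCE handling above maintains this invariant: when a
   delay-slot SEQUENCE S containing inner insns i0 .. iN is linked
   between insns A and B, then NEXT_INSN (A) == S and PREV_INSN (B) == S,
   while the inner insns mirror the outer links with PREV_INSN (i0) == A
   and NEXT_INSN (iN) == B.  This is what lets walkers such as next_insn
   and previous_insn step into a SEQUENCE body and come out the other
   side.  */
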

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting
   it elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
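
/* The detach-and-reinsert pattern the comment above describes looks
   like this (PLACE is a hypothetical destination insn):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, place, NULL);

   Because remove_insn leaves dataflow information intact, the moved
   insn needs no rescan beyond the one add_insn_after performs.  */
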

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
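
/* delete_insns_since is the usual way to roll back tentative emission,
   e.g. (a sketch; try_strategy stands for arbitrary caller code):

     rtx_insn *last = get_last_insn ();
     if (!try_strategy ())
       delete_insns_since (last);

   The rollback is O(1): only the chain's last-insn pointer and LAST's
   NEXT_INSN are reset, so everything emitted after LAST simply drops
   off the chain.  */
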

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in heavily fragmented RTL memory, since the
   SEQUENCE generated would almost certainly die right after it was
   created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				   make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

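/* The MAKE_RAW callback is what lets a single noloc helper serve every
   insn flavor: each public wrapper below just fixes the constructor.
   For instance, emit_call_insn_after_noloc passes make_call_insn_raw,
   so a bare pattern gets wrapped in a CALL_INSN, while an
   already-constructed insn list (the INSN/JUMP_INSN/... cases above)
   is spliced in unchanged.  */
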
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
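
/* For example, emitting a NOTE_INSN_VAR_LOCATION right after BB_END of
   a block goes through add_insn_after_nobb, leaving the note's
   BLOCK_FOR_INSN clear because it lies on the block boundary; the same
   note emitted in the middle of a block is added with the block set,
   so it moves with the block thereafter.  */
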
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}

4767
e8110d6f
NF
4768/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4769 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4770 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4771 CALL_INSN, etc. */
4772
cd459bf8 4773static rtx_insn *
dc01c3d1 4774emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
167b9fae 4775 rtx_insn *(*make_raw) (rtx))
0d682900 4776{
dc01c3d1
DM
4777 rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4778 rtx_insn *first = PREV_INSN (before);
4779 rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4780 insnp ? before : NULL_RTX,
4781 NULL, make_raw);
a7102479
JH
4782
4783 if (pattern == NULL_RTX || !loc)
dc01c3d1 4784 return last;
a7102479 4785
26cb3993
JH
4786 if (!first)
4787 first = get_insns ();
4788 else
4789 first = NEXT_INSN (first);
a7102479
JH
4790 while (1)
4791 {
20d4397a
EB
4792 if (active_insn_p (first)
4793 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4794 && !INSN_LOCATION (first))
5368224f 4795 INSN_LOCATION (first) = loc;
a7102479
JH
4796 if (first == last)
4797 break;
4798 first = NEXT_INSN (first);
4799 }
dc01c3d1 4800 return last;
a7102479
JH
4801}
4802
e8110d6f
NF
4803/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4804 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4805 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4806 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4807
cd459bf8 4808static rtx_insn *
dc01c3d1 4809emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
167b9fae 4810 bool insnp, rtx_insn *(*make_raw) (rtx))
a7102479 4811{
dc01c3d1
DM
4812 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4813 rtx_insn *next = before;
b5b8b0ac 4814
e8110d6f
NF
4815 if (skip_debug_insns)
4816 while (DEBUG_INSN_P (next))
4817 next = PREV_INSN (next);
b5b8b0ac
AO
4818
4819 if (INSN_P (next))
5368224f 4820 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
e8110d6f 4821 insnp, make_raw);
a7102479 4822 else
e8110d6f
NF
4823 return emit_pattern_before_noloc (pattern, before,
4824 insnp ? before : NULL_RTX,
4825 NULL, make_raw);
a7102479
JH
4826}
4827
5368224f 4828/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4829rtx_insn *
596f2b17 4830emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4831{
e8110d6f
NF
4832 return emit_pattern_before_setloc (pattern, before, loc, true,
4833 make_insn_raw);
4834}
a7102479 4835
5368224f 4836/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
cd459bf8 4837rtx_insn *
e8110d6f
NF
4838emit_insn_before (rtx pattern, rtx before)
4839{
4840 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4841}
a7102479 4842
5368224f 4843/* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
1476d1bd 4844rtx_jump_insn *
596f2b17 4845emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
e8110d6f 4846{
1476d1bd
MM
4847 return as_a <rtx_jump_insn *> (
4848 emit_pattern_before_setloc (pattern, before, loc, false,
4849 make_jump_insn_raw));
a7102479
JH
4850}
4851
5368224f 4852/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
1476d1bd 4853rtx_jump_insn *
a7102479
JH
4854emit_jump_insn_before (rtx pattern, rtx before)
4855{
1476d1bd
MM
4856 return as_a <rtx_jump_insn *> (
4857 emit_pattern_before (pattern, before, true, false,
4858 make_jump_insn_raw));
a7102479
JH
4859}
4860
5368224f 4861/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4862rtx_insn *
596f2b17 4863emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
a7102479 4864{
e8110d6f
NF
4865 return emit_pattern_before_setloc (pattern, before, loc, false,
4866 make_call_insn_raw);
0d682900 4867}
a7102479 4868
e8110d6f 4869/* Like emit_call_insn_before_noloc,
5368224f 4870 but set insn_location according to BEFORE. */
cd459bf8 4871rtx_insn *
596f2b17 4872emit_call_insn_before (rtx pattern, rtx_insn *before)
a7102479 4873{
e8110d6f
NF
4874 return emit_pattern_before (pattern, before, true, false,
4875 make_call_insn_raw);
a7102479 4876}
b5b8b0ac 4877
5368224f 4878/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4879rtx_insn *
b5b8b0ac
AO
4880emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4881{
e8110d6f
NF
4882 return emit_pattern_before_setloc (pattern, before, loc, false,
4883 make_debug_insn_raw);
b5b8b0ac
AO
4884}
4885
e8110d6f 4886/* Like emit_debug_insn_before_noloc,
5368224f 4887 but set insn_location according to BEFORE. */
cd459bf8 4888rtx_insn *
3a6216b0 4889emit_debug_insn_before (rtx pattern, rtx_insn *before)
b5b8b0ac 4890{
e8110d6f
NF
4891 return emit_pattern_before (pattern, before, false, false,
4892 make_debug_insn_raw);
b5b8b0ac 4893}
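
/* Summary of the location defaulting above: the plain emit_*_before /
   emit_*_after entry points copy INSN_LOCATION from the neighboring
   real insn, so e.g.

     emit_insn_before (pat, insn);

   acts like

     emit_insn_before_setloc (pat, insn, INSN_LOCATION (insn));

   whenever INSN satisfies INSN_P, and falls back to the _noloc variant
   otherwise.  */
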
0d682900 4894\f
2f937369
DM
4895/* Take X and emit it at the end of the doubly-linked
4896 INSN list.
23b2ce53
RS
4897
4898 Returns the last insn emitted. */
4899
cd459bf8 4900rtx_insn *
502b8322 4901emit_insn (rtx x)
23b2ce53 4902{
cd459bf8
DM
4903 rtx_insn *last = get_last_insn ();
4904 rtx_insn *insn;
23b2ce53 4905
2f937369
DM
4906 if (x == NULL_RTX)
4907 return last;
23b2ce53 4908
2f937369
DM
4909 switch (GET_CODE (x))
4910 {
b5b8b0ac 4911 case DEBUG_INSN:
2f937369
DM
4912 case INSN:
4913 case JUMP_INSN:
4914 case CALL_INSN:
4915 case CODE_LABEL:
4916 case BARRIER:
4917 case NOTE:
cd459bf8 4918 insn = as_a <rtx_insn *> (x);
2f937369 4919 while (insn)
23b2ce53 4920 {
cd459bf8 4921 rtx_insn *next = NEXT_INSN (insn);
23b2ce53 4922 add_insn (insn);
2f937369
DM
4923 last = insn;
4924 insn = next;
23b2ce53 4925 }
2f937369 4926 break;
23b2ce53 4927
2f937369 4928#ifdef ENABLE_RTL_CHECKING
39718607 4929 case JUMP_TABLE_DATA:
2f937369 4930 case SEQUENCE:
5b0264cb 4931 gcc_unreachable ();
2f937369
DM
4932 break;
4933#endif
23b2ce53 4934
2f937369
DM
4935 default:
4936 last = make_insn_raw (x);
4937 add_insn (last);
4938 break;
23b2ce53
RS
4939 }
4940
4941 return last;
4942}
4943
b5b8b0ac
AO
4944/* Make an insn of code DEBUG_INSN with pattern X
4945 and add it to the end of the doubly-linked list. */
4946
cd459bf8 4947rtx_insn *
b5b8b0ac
AO
4948emit_debug_insn (rtx x)
4949{
cd459bf8
DM
4950 rtx_insn *last = get_last_insn ();
4951 rtx_insn *insn;
b5b8b0ac
AO
4952
4953 if (x == NULL_RTX)
4954 return last;
4955
4956 switch (GET_CODE (x))
4957 {
4958 case DEBUG_INSN:
4959 case INSN:
4960 case JUMP_INSN:
4961 case CALL_INSN:
4962 case CODE_LABEL:
4963 case BARRIER:
4964 case NOTE:
cd459bf8 4965 insn = as_a <rtx_insn *> (x);
b5b8b0ac
AO
4966 while (insn)
4967 {
cd459bf8 4968 rtx_insn *next = NEXT_INSN (insn);
b5b8b0ac
AO
4969 add_insn (insn);
4970 last = insn;
4971 insn = next;
4972 }
4973 break;
4974
4975#ifdef ENABLE_RTL_CHECKING
39718607 4976 case JUMP_TABLE_DATA:
b5b8b0ac
AO
4977 case SEQUENCE:
4978 gcc_unreachable ();
4979 break;
4980#endif
4981
4982 default:
4983 last = make_debug_insn_raw (x);
4984 add_insn (last);
4985 break;
4986 }
4987
4988 return last;
4989}
4990
2f937369
DM
4991/* Make an insn of code JUMP_INSN with pattern X
4992 and add it to the end of the doubly-linked list. */
23b2ce53 4993
cd459bf8 4994rtx_insn *
502b8322 4995emit_jump_insn (rtx x)
23b2ce53 4996{
cd459bf8
DM
4997 rtx_insn *last = NULL;
4998 rtx_insn *insn;
23b2ce53 4999
2f937369 5000 switch (GET_CODE (x))
23b2ce53 5001 {
b5b8b0ac 5002 case DEBUG_INSN:
2f937369
DM
5003 case INSN:
5004 case JUMP_INSN:
5005 case CALL_INSN:
5006 case CODE_LABEL:
5007 case BARRIER:
5008 case NOTE:
cd459bf8 5009 insn = as_a <rtx_insn *> (x);
2f937369
DM
5010 while (insn)
5011 {
cd459bf8 5012 rtx_insn *next = NEXT_INSN (insn);
2f937369
DM
5013 add_insn (insn);
5014 last = insn;
5015 insn = next;
5016 }
5017 break;
e0a5c5eb 5018
2f937369 5019#ifdef ENABLE_RTL_CHECKING
39718607 5020 case JUMP_TABLE_DATA:
2f937369 5021 case SEQUENCE:
5b0264cb 5022 gcc_unreachable ();
2f937369
DM
5023 break;
5024#endif
e0a5c5eb 5025
2f937369
DM
5026 default:
5027 last = make_jump_insn_raw (x);
5028 add_insn (last);
5029 break;
3c030e88 5030 }
e0a5c5eb
RS
5031
5032 return last;
5033}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
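
/* Illustrative usage sketch (not part of the original file; LAB is a
   hypothetical local):

     rtx_code_label *lab = gen_label_rtx ();  // UID is 0 until placed
     emit_jump (lab);                         // jump forward to LAB
     ...                                      // code that is skipped
     emit_label (lab);                        // place LAB here

   gen_label_rtx only creates the label; emit_label assigns its INSN_UID
   and links it into the chain, which is why the assert above rejects a
   label that has already been emitted once.  */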

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
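
/* Illustrative usage sketch (not part of the original file; REG is a
   hypothetical local): a pass that must keep a register live past its
   last real use can emit an artificial use, and an expander can mark a
   register's old value as dead with a clobber:

     emit_use (reg);       // (use (reg)) keeps REG live
     emit_clobber (reg);   // (clobber (reg)) kills REG's old value

   gen_use/gen_clobber build the same insns as a detached sequence, for
   callers that splice the insns into the chain later.  */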

/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
         registers.  REG_EQUAL and REG_EQUIV notes really do require the
         PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
        return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced with DATUM.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
        return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
         initially mirrors one in PATTERN (INSN), later optimizations
         might alter the way that the final register value is calculated
         and so move or alter the side-effect in some way.  The note would
         then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
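
/* Illustrative usage sketch (not part of the original file): after
   emitting a multiply implemented as a shift, an expander can record the
   intended value for later optimizers, assuming INSN sets TARGET:

     set_unique_reg_note (insn, REG_EQUAL,
                          gen_rtx_MULT (mode, op0, GEN_INT (8)));

   INSN, MODE and OP0 are hypothetical locals here; the call returns
   NULL_RTX if INSN has no single SET the note could describe.  */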

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
\f
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx_insn *insn = emit_jump_insn (x);
        if (allow_barrier_p
            && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
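
/* Illustrative usage sketch (not part of the original file): callers
   holding an arbitrary pattern can let classify_insn pick the emitter:

     emit (gen_rtx_SET (target, source), true);

   dispatches to emit_insn, while a pattern containing a RETURN is
   classified as a jump, routed through emit_jump_insn, and followed by
   a barrier.  TARGET and SOURCE are hypothetical locals.  */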
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
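
/* Illustrative usage sketch (not part of the original file): the usual
   pattern for building a detached chain of insns is

     start_sequence ();
     emit_insn (gen_rtx_SET (dest, src));   // emitted into the sequence
     ...                                    // more emit_* calls
     rtx_insn *seq = get_insns ();          // fetch before end_sequence
     end_sequence ();                       // restore the outer chain
     emit_insn (seq);                       // splice SEQ into the outer chain

   DEST and SRC are hypothetical locals.  */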
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
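
/* Illustrative usage sketch (not part of the original file): a pattern
   must not be shared between two insns in the chain, so duplicating an
   existing insn goes through copy_insn rather than reusing PATTERN:

     emit_insn (copy_insn (PATTERN (old_insn)));

   OLD_INSN is a hypothetical local; unlike copy_rtx, copy_insn maps
   each SCRATCH to a single new SCRATCH and keeps the ASM_OPERANDS
   operand/constraint vectors shared within the copy.  */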

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
   zero, one, or minus-one vector when all elements are 0, 1, or -1
   respectively.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
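
/* Illustrative usage sketch (not part of the original file): building a
   V4SI constant of all ones; because every element is CONST1_RTX (SImode),
   the shared CONST1_RTX (V4SImode) is returned instead of a fresh rtx:

     rtvec v = rtvec_alloc (4);
     for (int j = 0; j < 4; j++)
       RTVEC_ELT (v, j) = const1_rtx;
     rtx ones = gen_rtx_CONST_VECTOR (V4SImode, v);

   V and ONES are hypothetical locals; V4SImode exists only on targets
   that define that vector mode.  */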

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
                                   /*prev_insn=*/NULL,
                                   /*next_insn=*/NULL,
                                   /*bb=*/NULL,
                                   /*pattern=*/NULL_RTX,
                                   /*location=*/-1,
                                   CODE_FOR_nothing,
                                   /*reg_notes=*/NULL_RTX);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
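
/* Illustrative usage sketch (not part of the original file): a backend
   expander that destroys a fixed hard register (say register 0 in
   SImode) can attach the shared clobber to a PARALLEL it builds:

     rtx clob = gen_hard_reg_clobber (SImode, 0);

   CLOB is a hypothetical local.  The cache above means repeated
   requests for the same (mode, regno) pair return one shared rtx
   instead of allocating a new CLOBBER each time.  */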

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location, so that the location data structures are
   built lazily, only when instructions at a given place are actually
   needed.  */
static location_t curr_location;

/* Allocate the insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
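
/* Illustrative usage sketch (not part of the original file): a target
   that implements atomic accesses with explicit fences might bracket
   the access like so, MODEL being the memory-model operand of the
   builtin:

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);   // pre-op (release) fence
     emit_move_insn (target, mem);        // the access itself
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);   // post-op (acquire) fence

   TARGET and MEM are hypothetical locals.  */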
\f
#include "gt-emit-rtl.h"