/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "stringpool.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "regs.h"
#include "recog.h"
#include "debug.h"
#include "langhooks.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record the fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by the peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   and insert it into the hash table if one identical to it is not
   already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs[regno][mode]
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

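/* Illustrative note, not part of the original file: gen_int_mode is the
   safe way to wrap a host integer in a CONST_INT, because the value is
   first truncated to MODE.  For example,

     rtx x = gen_int_mode (0x1ff, QImode);

   yields constm1_rtx (0x1ff truncated to 8 bits is -1 after sign
   extension), whereas GEN_INT (0x1ff) would produce a CONST_INT that is
   not a valid QImode value.  */
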
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

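/* Illustrative note, not part of the original file: a floating-point
   constant such as 1.0 in DFmode can be obtained with

     rtx one = const_double_from_real_value (dconst1, DFmode);

   and repeating the call returns the same rtx, because
   lookup_const_double uniquifies the result through const_double_htab.  */
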
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs a double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

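/* Illustrative note, not part of the original file: assuming a 64-bit
   HOST_WIDE_INT and !TARGET_SUPPORTS_WIDE_INT,
   immed_double_const (5, 0, DImode) simply returns GEN_INT (5) through
   the gen_int_mode path, while immed_double_const (0, 1, TImode) -- the
   128-bit value 2**64 -- really allocates a VOIDmode CONST_DOUBLE with
   low word 0 and high word 1.  */
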
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

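/* Illustrative note, not part of the original file: because of the
   special cases above, outside of reload

     gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)

   returns the shared frame_pointer_rtx object rather than a fresh REG,
   so explicit frame-pointer references can be recognized by simple
   pointer equality during frame pointer elimination.  */
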
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

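/* Illustrative note, not part of the original file: a register save
   slot in the fixed part of the frame could be described as

     rtx slot = gen_frame_mem (Pmode, frame_pointer_rtx);

   giving a MEM that is known not to trap and that carries the frame
   alias set, so it is not treated as aliasing user-visible memory.  */
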
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only
     have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

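/* Illustrative examples, not part of the original file, of how the rules
   above play out on a hypothetical 32-bit target where word_mode is
   SImode and UNITS_PER_WORD is 4 (REG here is a pseudo):

     validate_subreg (SImode, DImode, reg, 0)  -> true, aligned lowpart
     validate_subreg (SImode, DImode, reg, 2)  -> false, misaligned
     validate_subreg (HImode, DFmode, reg, 0)  -> false, float modes may
						  not change size
     validate_subreg (SImode, DFmode, reg, 0)  -> true, but only via the
						  word_mode escape hatch

   gen_rtx_SUBREG below asserts validate_subreg, so invalid combinations
   are caught at rtl-generation time.  */
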
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the
   arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
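
/* Illustrative note, not part of the original file: a two-element
   PARALLEL could be built with

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));

   where set1 and set2 are hypothetical SET rtxen already in scope;
   gen_rtvec_v is the variant to use when the elements are already
   collected in an array.  */
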
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
\f
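
/* Illustrative note, not part of the original file: for an SImode
   lowpart of a DImode value, byte_lowpart_offset (SImode, DImode) is 0
   on a little-endian target and 4 on a big-endian one; the paradoxical
   direction, byte_lowpart_offset (DImode, SImode), yields the negated
   values 0 and -4 respectively.  */
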
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimation because it might be
     spilled to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

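/* Illustrative note, not part of the original file: during expansion a
   fresh SImode pseudo is obtained with

     rtx tmp = gen_reg_rtx (SImode);

   whereas a DCmode request would normally return a CONCAT of two
   DFmode pseudos, as described in the complex-mode case above.  */
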
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
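
/* Illustrative note, not part of the original file: given
   x = (zero_extend:DI (reg:SI 100)), gen_lowpart_common (SImode, x)
   returns (reg:SI 100) directly, via the sign/zero-extension case
   above, instead of wrapping the extension in a SUBREG.  */
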
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes needed to get the OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

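/* Illustrative note, not part of the original file: for SImode within
   DImode, subreg_lowpart_offset is 0 and subreg_highpart_offset is 4 on
   a little-endian target; on a big-endian target the two values are
   swapped.  */
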
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
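
/* Illustrative note, not part of the original file:
   (subreg:DI (reg:SI 100) 0) is paradoxical, since DImode has more
   precision than SImode.  The SImode lowpart of a DImode register is
   (subreg:SI (reg:DI 100) 0) on a little-endian target but
   (subreg:SI (reg:DI 100) 4) on a big-endian one, matching
   subreg_lowpart_offset above.  */
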
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

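/* Illustrative sketch (editor's addition, not part of emit-rtl.c): asking
   how far MEM's address sits past a 32-bit boundary.  A non-negative
   result OFF means XEXP (MEM, 0) - OFF is known 32-bit aligned with
   0 <= OFF < 4; -1 means the alignment cannot be determined.  */
#if 0
static void
example_align_query (rtx mem)
{
  int off = get_mem_align_offset (mem, 32);
  gcc_assert (off >= -1 && off < 4);
}
#endif
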
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

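/* Illustrative sketch (editor's addition, not part of emit-rtl.c): after
   expanding a declaration to a fresh MEM, a caller would typically stamp
   the MEM with the declaration's attributes before publishing it as
   DECL_RTL, per the ordering requirement asserted above.  MEM and DECL
   are hypothetical.  */
#if 0
static void
example_stamp_attributes (rtx mem, tree decl)
{
  set_mem_attributes (mem, decl, 1);  /* OBJECTP: MEM is a new object.  */
  SET_DECL_RTL (decl, mem);           /* Only after setting attributes.  */
}
#endif
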
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
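
/* Illustrative sketch (editor's addition, not part of emit-rtl.c): the
   setters above all follow the same copy-modify-set pattern, because
   mem_attrs structures are shared between MEMs and must not be mutated
   in place.  A combined update would therefore look like this.  */
#if 0
static void
example_update_attrs (rtx mem)
{
  struct mem_attrs attrs = *get_mem_attrs (mem);  /* private copy */
  attrs.align = 64;              /* claim 64-bit alignment (illustration) */
  attrs.offset_known_p = false;  /* forget the offset */
  set_mem_attrs (mem, &attrs);   /* attach the updated attributes */
}
#endif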
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate the address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}

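/* Illustrative sketch (editor's addition, not part of emit-rtl.c): most
   callers reach adjust_address_1 through the adjust_address and
   adjust_address_nv wrapper macros declared elsewhere.  For example,
   reading the second word of a hypothetical DImode MEM:  */
#if 0
static rtx
example_high_word (rtx dimode_mem)
{
  /* Narrow the mode to word_mode and advance one word; the address is
     validated because adjust_address passes VALIDATE == 1.  */
  return adjust_address (dimode_mem, word_mode, UNITS_PER_WORD);
}
#endif
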
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
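
/* Illustrative sketch (editor's addition, not part of emit-rtl.c): a
   target that cannot issue QImode loads might widen a byte access to a
   full word and mask afterwards.  QI_MEM is a hypothetical QImode MEM
   whose containing word starts WORD_OFF bytes before it.  */
#if 0
static rtx
example_widen_byte (rtx qi_mem, HOST_WIDE_INT word_off)
{
  /* The returned MEM is word_mode-sized; the caller still has to shift
     and mask the loaded word to extract the original byte.  */
  return widen_memory_access (qi_mem, word_mode, -word_off);
}
#endif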
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
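
/* Illustrative sketch (editor's addition, not part of emit-rtl.c): the
   usual pattern when expanding control flow.  A fresh label is created
   up front and defined later with emit_label at the target point.  */
#if 0
static void
example_label_use (void)
{
  rtx_code_label *label = gen_label_rtx ();
  /* ... emit a conditional jump to LABEL here ... */
  emit_label (label);  /* Defines the label at the fall-through point.  */
}
#endif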
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

/* Check that ORIG is not marked when it should not be, and mark ORIG as
   in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

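/* Illustrative sketch (editor's addition, not part of emit-rtl.c): what
   the verifier is policing.  Emitting the same pattern rtx twice would
   create invalid sharing; a pass must deep-copy it first.  PAT is a
   hypothetical pattern already used by one insn.  */
#if 0
static void
example_emit_twice (rtx pat)
{
  emit_insn (pat);             /* First use owns PAT.  */
  emit_insn (copy_rtx (pat));  /* Second use must be a deep copy.  */
}
#endif
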
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

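/* Illustrative sketch (editor's addition, not part of emit-rtl.c): the
   skeleton of the traversal used above.  Recursing into every operand
   except the last and looping on the last keeps C stack usage
   proportional to rtx width rather than rtx depth.  Both helpers named
   below are hypothetical.  */
#if 0
static void
example_walk (rtx *slot)
{
 repeat:
  /* Visit *SLOT here; recurse into all operands but the last.  */
  if (has_trailing_operand_p (*slot))                   /* hypothetical */
    {
      slot = &XEXP (*slot, last_operand_index (*slot)); /* hypothetical */
      goto repeat;  /* The tail recursion becomes iteration.  */
    }
}
#endif
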
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
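
/* Illustrative sketch (editor's addition, not part of emit-rtl.c): a
   typical use during expansion.  If Y is about to be stored into while X
   still needs its old value, the caller protects X first.  X, Y and
   VALUE are hypothetical operands.  */
#if 0
static void
example_protect (rtx x, rtx y, rtx value)
{
  x = make_safe_from (x, y);  /* X now survives the store into Y.  */
  emit_move_insn (y, value);
  /* ... later uses of X see the pre-store value ... */
}
#endif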
\f
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

\f
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

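/* Illustrative sketch (editor's addition, not part of emit-rtl.c): a
   simple forward walk over the whole insn stream using the accessors
   above; next_insn descends into SEQUENCEs, while the raw NEXT_INSN
   macro does not.  */
#if 0
static int
example_count_insns (void)
{
  int count = 0;
  for (rtx_insn *insn = get_insns (); insn; insn = next_insn (insn))
    if (INSN_P (insn))
      count++;
  return count;
}
#endif
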
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

b5b8b0ac
AO
3370/* Return the next insn after INSN that is not a DEBUG_INSN. This
3371 routine does not look inside SEQUENCEs. */
3372
eb51c837 3373rtx_insn *
dc01c3d1 3374next_nondebug_insn (rtx uncast_insn)
b5b8b0ac 3375{
dc01c3d1
DM
3376 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3377
b5b8b0ac
AO
3378 while (insn)
3379 {
3380 insn = NEXT_INSN (insn);
3381 if (insn == 0 || !DEBUG_INSN_P (insn))
3382 break;
3383 }
3384
dc01c3d1 3385 return insn;
b5b8b0ac
AO
3386}
3387
3388/* Return the previous insn before INSN that is not a DEBUG_INSN.
3389 This routine does not look inside SEQUENCEs. */
3390
eb51c837 3391rtx_insn *
dc01c3d1 3392prev_nondebug_insn (rtx uncast_insn)
b5b8b0ac 3393{
dc01c3d1
DM
3394 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3395
b5b8b0ac
AO
3396 while (insn)
3397 {
3398 insn = PREV_INSN (insn);
3399 if (insn == 0 || !DEBUG_INSN_P (insn))
3400 break;
3401 }
3402
dc01c3d1 3403 return insn;
b5b8b0ac
AO
3404}
3405
f0fc0803
JJ
3406/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3407 This routine does not look inside SEQUENCEs. */
3408
eb51c837 3409rtx_insn *
dc01c3d1 3410next_nonnote_nondebug_insn (rtx uncast_insn)
f0fc0803 3411{
dc01c3d1
DM
3412 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3413
f0fc0803
JJ
3414 while (insn)
3415 {
3416 insn = NEXT_INSN (insn);
3417 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3418 break;
3419 }
3420
dc01c3d1 3421 return insn;
f0fc0803
JJ
3422}
3423
3424/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3425 This routine does not look inside SEQUENCEs. */
3426
eb51c837 3427rtx_insn *
dc01c3d1 3428prev_nonnote_nondebug_insn (rtx uncast_insn)
f0fc0803 3429{
dc01c3d1
DM
3430 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3431
f0fc0803
JJ
3432 while (insn)
3433 {
3434 insn = PREV_INSN (insn);
3435 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3436 break;
3437 }
3438
dc01c3d1 3439 return insn;
f0fc0803
JJ
3440}
3441
23b2ce53
RS
3442/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3443 or 0, if there is none. This routine does not look inside
0f41302f 3444 SEQUENCEs. */
23b2ce53 3445
eb51c837 3446rtx_insn *
dc01c3d1 3447next_real_insn (rtx uncast_insn)
23b2ce53 3448{
dc01c3d1
DM
3449 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3450
75547801
KG
3451 while (insn)
3452 {
3453 insn = NEXT_INSN (insn);
3454 if (insn == 0 || INSN_P (insn))
3455 break;
3456 }
23b2ce53 3457
dc01c3d1 3458 return insn;
23b2ce53
RS
3459}
3460
3461/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3462 or 0, if there is none. This routine does not look inside
3463 SEQUENCEs. */
3464
eb51c837 3465rtx_insn *
dc01c3d1 3466prev_real_insn (rtx uncast_insn)
23b2ce53 3467{
dc01c3d1
DM
3468 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3469
75547801
KG
3470 while (insn)
3471 {
3472 insn = PREV_INSN (insn);
3473 if (insn == 0 || INSN_P (insn))
3474 break;
3475 }
23b2ce53 3476
dc01c3d1 3477 return insn;
23b2ce53
RS
3478}
3479
ee960939
OH
3480/* Return the last CALL_INSN in the current list, or 0 if there is none.
3481 This routine does not look inside SEQUENCEs. */
3482
049cfc4a 3483rtx_call_insn *
502b8322 3484last_call_insn (void)
ee960939 3485{
049cfc4a 3486 rtx_insn *insn;
ee960939
OH
3487
3488 for (insn = get_last_insn ();
4b4bf941 3489 insn && !CALL_P (insn);
ee960939
OH
3490 insn = PREV_INSN (insn))
3491 ;
3492
049cfc4a 3493 return safe_as_a <rtx_call_insn *> (insn);
ee960939
OH
3494}
3495
23b2ce53 3496/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3497 does not look inside SEQUENCEs. After reload this also skips over
3498 standalone USE and CLOBBER insn. */
23b2ce53 3499
69732dcb 3500int
4f588890 3501active_insn_p (const_rtx insn)
69732dcb 3502{
4b4bf941 3503 return (CALL_P (insn) || JUMP_P (insn)
39718607 3504 || JUMP_TABLE_DATA_P (insn) /* FIXME */
4b4bf941 3505 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3506 && (! reload_completed
3507 || (GET_CODE (PATTERN (insn)) != USE
3508 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3509}
3510
eb51c837 3511rtx_insn *
dc01c3d1 3512next_active_insn (rtx uncast_insn)
23b2ce53 3513{
dc01c3d1
DM
3514 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3515
75547801
KG
3516 while (insn)
3517 {
3518 insn = NEXT_INSN (insn);
3519 if (insn == 0 || active_insn_p (insn))
3520 break;
3521 }
23b2ce53 3522
dc01c3d1 3523 return insn;
23b2ce53
RS
3524}
3525
3526/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3527 does not look inside SEQUENCEs. After reload this also skips over
3528 standalone USE and CLOBBER insn. */
23b2ce53 3529
eb51c837 3530rtx_insn *
dc01c3d1 3531prev_active_insn (rtx uncast_insn)
23b2ce53 3532{
dc01c3d1
DM
3533 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3534
75547801
KG
3535 while (insn)
3536 {
3537 insn = PREV_INSN (insn);
3538 if (insn == 0 || active_insn_p (insn))
3539 break;
3540 }
23b2ce53 3541
dc01c3d1 3542 return insn;
23b2ce53 3543}
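/* A minimal usage sketch, not part of the original file: a hypothetical
   helper that combines the walkers above to count the active insns in
   the half-open range [FROM, TO), skipping notes and debug insns.  */

static int
count_active_insns_sketch (rtx_insn *from, rtx_insn *to)
{
  int count = 0;
  for (rtx_insn *insn = from; insn && insn != to;
       insn = next_nonnote_nondebug_insn (insn))
    if (active_insn_p (insn))
      count++;
  return count;
}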
\f
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

#ifdef AUTO_INC_DEC
/* Return true if X contains an RTX_AUTOINC class rtx whose operand
   matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}
#endif

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_int_reg_note (insn, REG_BR_PROB, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx_insn *next;
	    rtx *p;

	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* If the new call is the last instruction in the sequence,
	       it will effectively replace the old call in-situ.  Otherwise
	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
	       so that it comes immediately after the new call.  */
	    if (NEXT_INSN (insn))
	      for (next = NEXT_INSN (trial);
		   next && NOTE_P (next);
		   next = NEXT_INSN (next))
		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		  {
		    remove_insn (next);
		    add_insn_after (next, insn, NULL);
		    break;
		  }
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
	 ? (after ? PREV_INSN (after) : get_last_insn ())
	 : NEXT_INSN (before);
}
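/* A minimal usage sketch, not part of the original file: a hypothetical
   loop, in the style of a splitting pass, that offers every insn in the
   current function to try_split.  try_split returns TRIAL itself when
   nothing was split, so the walk simply continues from its result.  */

static void
split_all_insns_sketch (void)
{
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      insn = try_split (PATTERN (insn), insn, 1);
}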
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
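/* A minimal checking sketch, not part of the original file: a hypothetical
   assert of the invariant link_insn_into_chain maintains, namely that
   neighboring insns point at each other from both sides.  */

static void
verify_chain_links_sketch (rtx_insn *insn)
{
  if (PREV_INSN (insn))
    gcc_assert (NEXT_INSN (PREV_INSN (insn)) == insn);
  if (NEXT_INSN (insn))
    gcc_assert (PREV_INSN (NEXT_INSN (insn)) == insn);
}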
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
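/* A minimal usage sketch, not part of the original file: a hypothetical
   helper that detaches INSN and re-emits it after AFTER, nullifying the
   stale links as the comment above remove_insn requires.  */

static void
move_insn_after_sketch (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}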
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
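/* A minimal usage sketch, not part of the original file: a hypothetical
   helper that records REG as used by CALL_INSN by appending a USE to
   its function-usage list via add_function_usage_to.  */

static void
record_call_use_sketch (rtx call_insn, rtx reg)
{
  add_function_usage_to (call_insn,
			 gen_rtx_EXPR_LIST (VOIDmode,
					    gen_rtx_USE (VOIDmode, reg),
					    NULL_RTX));
}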
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
					   make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}

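/* A minimal usage sketch, not part of the original file: a hypothetical
   helper that emits a register-to-register move right after AFTER,
   inheriting AFTER's location via emit_insn_after.  gen_move_insn is
   assumed here from the middle end's expander helpers.  */

static rtx_insn *
emit_copy_after_sketch (rtx dest, rtx src, rtx_insn *after)
{
  return emit_insn_after (gen_move_insn (dest, src), after);
}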
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
\f
2f937369
DM
4901/* Take X and emit it at the end of the doubly-linked
4902 INSN list.
23b2ce53
RS
4903
4904 Returns the last insn emitted. */
4905
cd459bf8 4906rtx_insn *
502b8322 4907emit_insn (rtx x)
23b2ce53 4908{
cd459bf8
DM
4909 rtx_insn *last = get_last_insn ();
4910 rtx_insn *insn;
23b2ce53 4911
2f937369
DM
4912 if (x == NULL_RTX)
4913 return last;
23b2ce53 4914
2f937369
DM
4915 switch (GET_CODE (x))
4916 {
b5b8b0ac 4917 case DEBUG_INSN:
2f937369
DM
4918 case INSN:
4919 case JUMP_INSN:
4920 case CALL_INSN:
4921 case CODE_LABEL:
4922 case BARRIER:
4923 case NOTE:
cd459bf8 4924 insn = as_a <rtx_insn *> (x);
2f937369 4925 while (insn)
23b2ce53 4926 {
cd459bf8 4927 rtx_insn *next = NEXT_INSN (insn);
23b2ce53 4928 add_insn (insn);
2f937369
DM
4929 last = insn;
4930 insn = next;
23b2ce53 4931 }
2f937369 4932 break;
23b2ce53 4933
2f937369 4934#ifdef ENABLE_RTL_CHECKING
39718607 4935 case JUMP_TABLE_DATA:
2f937369 4936 case SEQUENCE:
5b0264cb 4937 gcc_unreachable ();
2f937369
DM
4938 break;
4939#endif
23b2ce53 4940
2f937369
DM
4941 default:
4942 last = make_insn_raw (x);
4943 add_insn (last);
4944 break;
23b2ce53
RS
4945 }
4946
4947 return last;
4948}
4949
b5b8b0ac
AO
4950/* Make an insn of code DEBUG_INSN with pattern X
4951 and add it to the end of the doubly-linked list. */
4952
cd459bf8 4953rtx_insn *
b5b8b0ac
AO
4954emit_debug_insn (rtx x)
4955{
cd459bf8
DM
4956 rtx_insn *last = get_last_insn ();
4957 rtx_insn *insn;
b5b8b0ac
AO
4958
4959 if (x == NULL_RTX)
4960 return last;
4961
4962 switch (GET_CODE (x))
4963 {
4964 case DEBUG_INSN:
4965 case INSN:
4966 case JUMP_INSN:
4967 case CALL_INSN:
4968 case CODE_LABEL:
4969 case BARRIER:
4970 case NOTE:
cd459bf8 4971 insn = as_a <rtx_insn *> (x);
b5b8b0ac
AO
4972 while (insn)
4973 {
cd459bf8 4974 rtx_insn *next = NEXT_INSN (insn);
b5b8b0ac
AO
4975 add_insn (insn);
4976 last = insn;
4977 insn = next;
4978 }
4979 break;
4980
4981#ifdef ENABLE_RTL_CHECKING
39718607 4982 case JUMP_TABLE_DATA:
b5b8b0ac
AO
4983 case SEQUENCE:
4984 gcc_unreachable ();
4985 break;
4986#endif
4987
4988 default:
4989 last = make_debug_insn_raw (x);
4990 add_insn (last);
4991 break;
4992 }
4993
4994 return last;
4995}
4996
2f937369
DM
4997/* Make an insn of code JUMP_INSN with pattern X
4998 and add it to the end of the doubly-linked list. */
23b2ce53 4999
cd459bf8 5000rtx_insn *
502b8322 5001emit_jump_insn (rtx x)
23b2ce53 5002{
cd459bf8
DM
5003 rtx_insn *last = NULL;
5004 rtx_insn *insn;
23b2ce53 5005
2f937369 5006 switch (GET_CODE (x))
23b2ce53 5007 {
b5b8b0ac 5008 case DEBUG_INSN:
2f937369
DM
5009 case INSN:
5010 case JUMP_INSN:
5011 case CALL_INSN:
5012 case CODE_LABEL:
5013 case BARRIER:
5014 case NOTE:
cd459bf8 5015 insn = as_a <rtx_insn *> (x);
2f937369
DM
5016 while (insn)
5017 {
cd459bf8 5018 rtx_insn *next = NEXT_INSN (insn);
2f937369
DM
5019 add_insn (insn);
5020 last = insn;
5021 insn = next;
5022 }
5023 break;
e0a5c5eb 5024
2f937369 5025#ifdef ENABLE_RTL_CHECKING
39718607 5026 case JUMP_TABLE_DATA:
2f937369 5027 case SEQUENCE:
5b0264cb 5028 gcc_unreachable ();
2f937369
DM
5029 break;
5030#endif
e0a5c5eb 5031
2f937369
DM
5032 default:
5033 last = make_jump_insn_raw (x);
5034 add_insn (last);
5035 break;
3c030e88 5036 }
e0a5c5eb
RS
5037
5038 return last;
5039}
5040
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

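/* Editorial usage sketch: a label from gen_label_rtx has UID zero and
   sits outside the insn stream until emit_label places it, which is
   when the assertion above passes and the UID is assigned.  */
#if 0
  rtx_code_label *skip = gen_label_rtx ();   /* Hypothetical label.  */
  /* ... emit insns that jump forward to SKIP ... */
  emit_label (skip);                         /* UID assigned here.  */
#endif
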
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

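/* Editorial usage sketch: notes annotate the insn stream rather than
   generate code; expansion, for example, marks the start of the
   function body this way.  */
#if 0
  rtx_note *note = emit_note (NOTE_INSN_FUNCTION_BEG);
#endif
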
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

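/* Editorial usage sketch: a USE keeps a value live past its last real
   reference, and a CLOBBER tells dataflow that a register is
   overwritten without saying how.  The operands are illustrative.  */
#if 0
  emit_use (stack_pointer_rtx);            /* Keep the stack pointer live.  */
  emit_clobber (gen_reg_rtx (word_mode));  /* Pseudo set by unknown means.  */
#endif
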
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
         registers.  REG_EQUAL and REG_EQUIV notes really do require the
         PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
        return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
        return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
         initially mirrors one in PATTERN (INSN), later optimizations
         might alter the way that the final register value is calculated
         and so move or alter the side-effect in some way.  The note would
         then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}

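/* Editorial usage sketch: after emitting an insn that computes a value
   in a roundabout way, record the simpler equivalent so later passes
   can substitute it.  INSN, OP0 and OP1 are hypothetical.  */
#if 0
  set_unique_reg_note (insn, REG_EQUAL,
                       gen_rtx_MULT (word_mode, op0, op1));
#endif
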
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}

/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx_insn *insn = emit_jump_insn (x);
        if (allow_barrier_p
            && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}

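/* Editorial usage sketch: emit classifies a bare pattern itself, so a
   caller need not pick the right emit_* variant; with ALLOW_BARRIER_P,
   a RETURN pattern also gets its trailing barrier (assuming the target
   accepts a bare RETURN, as the GET_CODE test above anticipates).  */
#if 0
  emit (ret_rtx, true);   /* Dispatched as a jump; barrier follows.  */
#endif
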
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}

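/* Editorial usage sketch: the canonical pattern for building a
   detached sequence.  Note that get_insns must be called before
   end_sequence, as documented above.  DEST and SRC are hypothetical.  */
#if 0
  start_sequence ();
  emit_move_insn (dest, src);    /* Insns accumulate in the new sequence.  */
  rtx_insn *seq = get_insns ();  /* Read the result before popping.  */
  end_sequence ();
  emit_insn (seq);               /* Splice it into the enclosing stream.  */
#endif
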
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
         pseudo reg clobbers or clobbers of hard registers that originated
         as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
          && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
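
/* Editorial usage sketch: unlike copy_rtx, copy_insn is safe on
   patterns containing SCRATCHes or ASM_OPERANDS, which is why insn
   duplication goes through it.  INSN and AFTER are hypothetical.  */
#if 0
  emit_insn_after (copy_insn (PATTERN (insn)), after);
#endif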

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero
   vector when all elements are zero, the shared one vector when all
   elements are one, and the shared minus-one vector when all elements are
   minus one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

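/* Editorial usage sketch: an all-zero vector collapses to the shared
   CONST0_RTX object instead of allocating a fresh CONST_VECTOR.
   V4SImode stands in for whatever vector mode the target provides.  */
#if 0
  rtvec v = rtvec_alloc (4);
  for (int i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);
  gcc_assert (zero == CONST0_RTX (V4SImode));
#endif
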
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (machine_mode) ((int) (mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode) ((int) (mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
                                   /*prev_insn=*/NULL,
                                   /*next_insn=*/NULL,
                                   /*bb=*/NULL,
                                   /*pattern=*/NULL_RTX,
                                   /*location=*/-1,
                                   CODE_FOR_nothing,
                                   /*reg_notes=*/NULL_RTX);
}

/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

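/* Editorial usage sketch: block-duplication code can clone an insn in
   place with its sticky notes and call flags preserved.  INSN and
   AFTER are hypothetical.  */
#if 0
  rtx_insn *copy = emit_copy_of_insn_after (insn, after);
#endif
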
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

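/* Editorial usage sketch: the returned CLOBBER is cached per
   (mode, regno), so repeated calls yield pointer-equal rtx.  REGNO is
   a hypothetical hard register number.  */
#if 0
  rtx c1 = gen_hard_reg_clobber (word_mode, regno);
  rtx c2 = gen_hard_reg_clobber (word_mode, regno);
  gcc_assert (c1 == c2);   /* Shared, not reallocated.  */
#endif
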
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the location
   data structures are built lazily, only when insns at a given location
   are actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block to which INSN belongs.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
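
/* Editorial usage sketch: a target expander for an atomic load might
   use this predicate to decide whether a fence is needed after the
   load; gen_memory_barrier stands in for whatever barrier pattern the
   hypothetical target provides.  */
#if 0
  if (need_atomic_barrier_p (model, /*pre=*/false))
    emit_insn (gen_memory_barrier ());
#endif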

#include "gt-emit-rtl.h"