/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

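/* For example, a generator built from a machine-description pattern
   such as (set (match_operand 0) (plus (match_operand 1)
   (match_operand 2))) constructs, via gen_rtx_fmt_ee and friends, an
   expression along the lines of

     (set (reg:SI 60) (plus:SI (reg:SI 58) (reg:SI 59)))

   (an illustrative sketch; the actual modes and register numbers
   depend on the target and on the function being compiled).  */
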
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the function currently being
   processed, in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for DECL and OFFSET, and insert it
   into the hash table if one identical to it is not already in the
   table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

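/* An illustrative consequence of the sharing above: repeated requests
   for the same integer yield pointer-identical rtxes, so

     GEN_INT (0) == const0_rtx && GEN_INT (2) == const2_rtx

   holds, and CONST_INTs can be compared with == rather than with
   rtx_equal_p.  */
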
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

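/* A worked example of the truncation above: in QImode the bit pattern
   0xff denotes -1, so gen_int_mode (255, QImode) returns constm1_rtx,
   whereas GEN_INT (255) would produce a CONST_INT that is not a valid
   QImode value.  */
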
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

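/* Illustration: with a 64-bit HOST_WIDE_INT, a TImode constant whose
   value fits in one host word is returned as a (shared) CONST_INT,
   while a genuinely 128-bit value becomes a two-element CONST_WIDE_INT
   on TARGET_SUPPORTS_WIDE_INT targets and a CONST_DOUBLE otherwise.  */
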
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of I1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., I1 consists only of copies of the sign bit, and the signs
	of I0 and I1 are the same), then we return a CONST_INT for I0.
     3) Otherwise, we create a CONST_DOUBLE for I0 and I1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

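/* Usage sketch for gen_const_mem (the symbol name below is
   illustrative only): constant-pool references are typically built as

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, ".LC0");
     rtx mem = gen_const_mem (SImode, sym);

   such a MEM is known never to trap and never to change, which lets
   later passes hoist or CSE it freely.  */
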
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

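/* Some concrete consequences of the rules above, on a 32-bit-word
   target (illustrative; note that the word_mode escape hatch noted
   above makes any (subreg:SI ...) acceptable there):

     (subreg:SI (reg:DI ...) 0)   valid: aligned lowpart word
     (subreg:SI (reg:DI ...) 4)   valid: aligned highpart word
     (subreg:SI (reg:DI ...) 2)   invalid: offset not a multiple of 4
     (subreg:HI (reg:DF ...) 0)   invalid: float mode changing size
     (subreg:DI (reg:DF ...) 0)   valid: float mode keeping size.  */
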
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}

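/* Worked example: for byte_lowpart_offset (SImode, DImode) the SImode
   lowpart starts 0 bytes into the DImode value on little-endian
   targets and 4 bytes in on big-endian ones; the paradoxical
   direction, byte_lowpart_offset (DImode, SImode), correspondingly
   yields 0 or -4.  */
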
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

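/* For instance, with generating_concat_p set, gen_reg_rtx (DCmode)
   builds (concat:DC (reg:DF ...) (reg:DF ...)) from two independent
   DFmode pseudos rather than allocating one DCmode pseudo.  */
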
/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

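/* Example: gen_lowpart_common (QImode, GEN_INT (0x1234)) returns the
   CONST_INT 0x34, and gen_lowpart_common (SImode, (sign_extend:DI
   (reg:SI x))) simply returns (reg:SI x); unhandled cases, such as
   arbitrary arithmetic expressions, yield 0.  */
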
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

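/* Worked example (INNERMODE DImode, OUTERMODE SImode, so difference = 4):
   on a little-endian target both adjustments are skipped and the
   lowpart lives at byte offset 0; with WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN set and 4-byte words, the word adjustment
   contributes 4 and the byte adjustment 0, placing the lowpart at
   offset 4.  */
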
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}

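/* E.g. (subreg:SI (reg:QI ...) 0) is paradoxical: the outer mode is
   wider than the inner one.  It is still a lowpart (offset 0), so
   subreg_lowpart_p holds for it as well.  */
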
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

NS
1561/* Similar to `operand_subword', but never return 0. If we can't
1562 extract the required subword, put OP into a register and try again.
1563 The second attempt must succeed. We always validate the address in
1564 this case.
23b2ce53
RS
1565
1566 MODE is the mode of OP, in case it is CONST_INT. */
1567
1568rtx
502b8322 1569operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1570{
ddef6bc7 1571 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1572
1573 if (result)
1574 return result;
1575
1576 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1577 {
1578 /* If this is a register which can not be accessed by words, copy it
1579 to a pseudo register. */
f8cfc6aa 1580 if (REG_P (op))
77e6b0eb
JC
1581 op = copy_to_reg (op);
1582 else
1583 op = force_reg (mode, op);
1584 }
23b2ce53 1585
ddef6bc7 1586 result = operand_subword (op, offset, 1, mode);
5b0264cb 1587 gcc_assert (result);
23b2ce53
RS
1588
1589 return result;
1590}
1591\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

805903b5
JJ
1610/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1611 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1612 -1 if not known. */
1613
1614int
d9223014 1615get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1616{
1617 tree expr;
1618 unsigned HOST_WIDE_INT offset;
1619
1620 /* This function can't use
527210c4 1621 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1622 || (MAX (MEM_ALIGN (mem),
0eb77834 1623 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1624 < align))
1625 return -1;
1626 else
527210c4 1627 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1628 for two reasons:
1629 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1630 for <variable>. get_inner_reference doesn't handle it and
1631 even if it did, the alignment in that case needs to be determined
1632 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1633 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1634 isn't sufficiently aligned, the object it is in might be. */
1635 gcc_assert (MEM_P (mem));
1636 expr = MEM_EXPR (mem);
527210c4 1637 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1638 return -1;
1639
527210c4 1640 offset = MEM_OFFSET (mem);
805903b5
JJ
1641 if (DECL_P (expr))
1642 {
1643 if (DECL_ALIGN (expr) < align)
1644 return -1;
1645 }
1646 else if (INDIRECT_REF_P (expr))
1647 {
1648 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1649 return -1;
1650 }
1651 else if (TREE_CODE (expr) == COMPONENT_REF)
1652 {
1653 while (1)
1654 {
1655 tree inner = TREE_OPERAND (expr, 0);
1656 tree field = TREE_OPERAND (expr, 1);
1657 tree byte_offset = component_ref_field_offset (expr);
1658 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1659
1660 if (!byte_offset
cc269bb6
RS
1661 || !tree_fits_uhwi_p (byte_offset)
1662 || !tree_fits_uhwi_p (bit_offset))
805903b5
JJ
1663 return -1;
1664
ae7e9ddd
RS
1665 offset += tree_to_uhwi (byte_offset);
1666 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
805903b5
JJ
1667
1668 if (inner == NULL_TREE)
1669 {
1670 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1671 < (unsigned int) align)
1672 return -1;
1673 break;
1674 }
1675 else if (DECL_P (inner))
1676 {
1677 if (DECL_ALIGN (inner) < align)
1678 return -1;
1679 break;
1680 }
1681 else if (TREE_CODE (inner) != COMPONENT_REF)
1682 return -1;
1683 expr = inner;
1684 }
1685 }
1686 else
1687 return -1;
1688
1689 return offset & ((align / BITS_PER_UNIT) - 1);
1690}
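
/* Illustrative sketch, not part of the original file: using
   get_mem_align_offset to decide how many bytes of peeling are needed
   before MEM reaches an ALIGN-bit boundary.  The helper name is
   hypothetical.  */

static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_bytes_until_aligned (rtx mem, unsigned int align)
{
  int misalign = get_mem_align_offset (mem, align);

  if (misalign < 0)
    return -1;                  /* Position relative to ALIGN unknown.  */
  if (misalign == 0)
    return 0;                   /* Already aligned.  */
  return align / BITS_PER_UNIT - misalign;
}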

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
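
/* Illustrative sketch, not part of the original file: the usual
   expander pattern for giving a freshly generated MEM the attributes
   of the tree node it came from.  The helper name and the
   caller-supplied ADDR are hypothetical; the set_mem_attributes call
   is the point.  */

static rtx ATTRIBUTE_UNUSED
example_mem_for_expr (tree exp, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);

  /* EXP is an object, not just a type, so pass OBJECTP == 1; alias
     set, alignment, size and MEM_EXPR are derived as above.  */
  set_mem_attributes (mem, exp, 1);
  return mem;
}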

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
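
/* Illustrative sketch, not part of the original file: stamping
   attributes onto a MEM by hand with the accessors above, e.g. after
   the caller has proven a stronger alignment.  Note that the SET
   passed to set_mem_alias_set must conflict with the MEM's current
   alias set, or the checking assert above will fire.  The helper name
   is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_mark_mem (rtx mem, alias_set_type set)
{
  set_mem_align (mem, 64);      /* Proven 64-bit aligned.  */
  set_mem_alias_set (mem, set);
  set_mem_size (mem, GET_MODE_SIZE (GET_MODE (mem)));
}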

/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
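
/* Illustrative sketch, not part of the original file: callers normally
   reach this function through the adjust_address/adjust_address_nv
   convenience macros in expr.h, which pass VALIDATE = 1/0,
   ADJUST_ADDRESS = 1, ADJUST_OBJECT = 0 and SIZE = 0.  Splitting a
   DImode MEM into SImode halves (shown for a little-endian word
   layout) then looks like this; the helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_split_mem (rtx mem, rtx *lo, rtx *hi)
{
  gcc_assert (MEM_P (mem) && GET_MODE (mem) == DImode);
  *lo = adjust_address (mem, SImode, 0);
  *hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
}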

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
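
/* Illustrative sketch, not part of the original file: offsetting a MEM
   by a byte index held in a register.  Passing POW2 == 4 records that
   the index is known to be a multiple of 4 bytes, so the result keeps
   32-bit alignment information.  The helper name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_indexed_mem (rtx mem, rtx byte_index)
{
  return offset_address (mem, byte_index, 4);
}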

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
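
/* Illustrative sketch, not part of the original file: legitimizing a
   MEM whose address is too complex by loading the address into a
   pseudo.  The memory referenced is unchanged, so
   replace_equiv_address is the right entry point.  The helper name is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_force_address_into_reg (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return replace_equiv_address (mem, addr, false);
}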

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
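
/* Illustrative sketch, not part of the original file: a target without
   byte loads widening a QImode access to a full SImode word, to be
   followed by the target's own extraction logic (not shown).  Whether
   offset 0 stays within the underlying object is the caller's problem,
   as the comments above explain.  The helper name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_widen_byte_access (rtx byte_mem)
{
  gcc_assert (MEM_P (byte_mem) && GET_MODE (byte_mem) == QImode);
  return widen_memory_access (byte_mem, SImode, 0);
}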

/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
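
/* Illustrative sketch, not part of the original file: the usual
   pattern for a forward branch during expansion -- create the label
   first, emit a conditional jump to it, then emit the label at the
   join point.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_forward_branch (rtx cond_reg)
{
  rtx_code_label *join = gen_label_rtx ();

  /* Skip the conditional code when COND_REG is zero.  */
  emit_cmp_and_jump_insns (cond_reg, const0_rtx, EQ, NULL_RTX,
                           GET_MODE (cond_reg), 0, join);

  /* ... insns executed only when COND_REG is nonzero ...  */

  emit_label (join);
}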

/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}


/* Check that ORIG is not marked when it should not be and mark ORIG as
   in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo
         reg clobbers or clobbers of hard registers that originated as
         pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  Note: the original body called
   reset_used_flags here, which made the verification a no-op; the
   verify walk must use verify_rtx_sharing to actually check the used
   bits.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

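/* Illustrative sketch, not part of the original file: a pass that has
   rewritten RTL wholesale can re-establish the no-sharing invariant
   and then check it when checking is enabled.  The helper name is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_unshare_and_verify (void)
{
  unshare_all_rtl_again (get_insns ());
#ifdef ENABLE_CHECKING
  verify_rtl_sharing ();
#endif
}
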
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo
         reg clobbers or clobbers of hard registers that originated as
         pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
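
/* Illustrative sketch, not part of the original file: the standard
   idiom when an existing expression is about to be reused -- clear the
   used bits first, then let copy_rtx_if_shared duplicate only the
   parts that turn out to be shared.  The helper name is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_unshared_copy (rtx pat)
{
  reset_used_flags (pat);
  return copy_rtx_if_shared (pat);
}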

/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length-1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}

/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
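
/* Illustrative sketch, not part of the original file: protecting the
   second source of a two-part store, so that writing DEST0 cannot
   clobber SRC1 before it has been read.  The helper name is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_store_pair (rtx dest0, rtx src0, rtx dest1, rtx src1)
{
  /* After this, SRC1 either did not overlap DEST0 or now lives in a
     fresh pseudo.  */
  src1 = make_safe_from (src1, dest0);
  emit_move_insn (dest0, src0);
  emit_move_insn (dest1, src1);
}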

/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

23b2ce53
RS
3213\f
3214/* Return the next insn. If it is a SEQUENCE, return the first insn
3215 of the sequence. */
3216
eb51c837 3217rtx_insn *
dc01c3d1 3218next_insn (rtx uncast_insn)
23b2ce53 3219{
dc01c3d1 3220 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
75547801
KG
3221 if (insn)
3222 {
3223 insn = NEXT_INSN (insn);
3224 if (insn && NONJUMP_INSN_P (insn)
3225 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3226 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3227 }
23b2ce53 3228
dc01c3d1 3229 return insn;
23b2ce53
RS
3230}
3231
3232/* Return the previous insn. If it is a SEQUENCE, return the last insn
3233 of the sequence. */
3234
eb51c837 3235rtx_insn *
dc01c3d1 3236previous_insn (rtx uncast_insn)
23b2ce53 3237{
dc01c3d1 3238 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
75547801
KG
3239 if (insn)
3240 {
3241 insn = PREV_INSN (insn);
dc01c3d1
DM
3242 if (insn && NONJUMP_INSN_P (insn))
3243 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3244 insn = seq->insn (seq->len () - 1);
75547801 3245 }
23b2ce53 3246
dc01c3d1 3247 return insn;
23b2ce53
RS
3248}
3249
3250/* Return the next insn after INSN that is not a NOTE. This routine does not
3251 look inside SEQUENCEs. */
3252
eb51c837 3253rtx_insn *
dc01c3d1 3254next_nonnote_insn (rtx uncast_insn)
23b2ce53 3255{
dc01c3d1 3256 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
75547801
KG
3257 while (insn)
3258 {
3259 insn = NEXT_INSN (insn);
3260 if (insn == 0 || !NOTE_P (insn))
3261 break;
3262 }
23b2ce53 3263
dc01c3d1 3264 return insn;
23b2ce53
RS
3265}
3266
1e211590
DD
3267/* Return the next insn after INSN that is not a NOTE, but stop the
3268 search before we enter another basic block. This routine does not
3269 look inside SEQUENCEs. */
3270
eb51c837 3271rtx_insn *
dc01c3d1 3272next_nonnote_insn_bb (rtx uncast_insn)
1e211590 3273{
dc01c3d1
DM
3274 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3275
1e211590
DD
3276 while (insn)
3277 {
3278 insn = NEXT_INSN (insn);
3279 if (insn == 0 || !NOTE_P (insn))
3280 break;
3281 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3282 return NULL;
1e211590
DD
3283 }
3284
dc01c3d1 3285 return insn;
1e211590
DD
3286}
3287
23b2ce53
RS
3288/* Return the previous insn before INSN that is not a NOTE. This routine does
3289 not look inside SEQUENCEs. */
3290
eb51c837 3291rtx_insn *
dc01c3d1 3292prev_nonnote_insn (rtx uncast_insn)
23b2ce53 3293{
dc01c3d1
DM
3294 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3295
75547801
KG
3296 while (insn)
3297 {
3298 insn = PREV_INSN (insn);
3299 if (insn == 0 || !NOTE_P (insn))
3300 break;
3301 }
23b2ce53 3302
dc01c3d1 3303 return insn;
23b2ce53
RS
3304}
3305
896aa4ea
DD
3306/* Return the previous insn before INSN that is not a NOTE, but stop
3307 the search before we enter another basic block. This routine does
3308 not look inside SEQUENCEs. */
3309
eb51c837 3310rtx_insn *
dc01c3d1 3311prev_nonnote_insn_bb (rtx uncast_insn)
896aa4ea 3312{
dc01c3d1
DM
3313 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3314
896aa4ea
DD
3315 while (insn)
3316 {
3317 insn = PREV_INSN (insn);
3318 if (insn == 0 || !NOTE_P (insn))
3319 break;
3320 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3321 return NULL;
896aa4ea
DD
3322 }
3323
dc01c3d1 3324 return insn;
896aa4ea
DD
3325}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
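
/* Illustrative sketch, not part of emit-rtl.c: a hypothetical helper
   showing the scanning idiom the functions above provide.  It counts
   the insns after FIRST that are neither NOTEs nor DEBUG_INSNs; the
   helper name and the ATTRIBUTE_UNUSED marker are assumptions for
   exposition only.  */

static int ATTRIBUTE_UNUSED
count_nonnote_nondebug_insns (rtx_insn *first)
{
  int count = 0;
  /* next_nonnote_nondebug_insn starts strictly after its argument and,
     per the comments above, does not look inside SEQUENCEs.  */
  for (rtx_insn *insn = next_nonnote_nondebug_insn (first);
       insn != NULL;
       insn = next_nonnote_nondebug_insn (insn))
    count++;
  return count;
}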

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
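
/* Illustrative sketch, not part of emit-rtl.c: clarifies the contract
   of next_active_insn, which starts its search strictly after INSN.
   This hypothetical helper returns INSN itself when it is already
   active; the name is an assumption for exposition only.  */

static rtx_insn * ATTRIBUTE_UNUSED
this_or_next_active_insn (rtx_insn *insn)
{
  if (insn && active_insn_p (insn))
    return insn;
  /* Search begins at NEXT_INSN (insn); returns NULL when none is found.  */
  return next_active_insn (insn);
}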
\f
#ifdef HAVE_cc0
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Return true if X contains a RTX_AUTOINC class rtx whose operand
   matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}
#endif
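
/* Illustrative sketch, not part of emit-rtl.c: the same FOR_EACH_SUBRTX
   walk used by find_auto_inc above, here counting every occurrence of
   REG anywhere inside X.  Hypothetical helper, for exposition only.  */

static int ATTRIBUTE_UNUSED
count_reg_occurrences (const_rtx x, const_rtx reg)
{
  int count = 0;
  subrtx_iterator::array_type array;
  /* The iterator visits X itself and every sub-rtx of X.  */
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    if (rtx_equal_p (reg, *iter))
      count++;
  return count;
}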

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx uncast_trial, int last)
{
  rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx call_insn = NULL_RTX;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
         ? (after ? PREV_INSN (after) : get_last_insn ())
         : NEXT_INSN (before);
}
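
/* Illustrative sketch, not part of emit-rtl.c: one plausible way a
   caller can use try_split.  Because try_split returns TRIAL itself
   when nothing was split, progress can be detected by a pointer
   comparison.  The helper name is hypothetical, for exposition only.  */

static rtx_insn * ATTRIBUTE_UNUSED
split_insn_if_possible (rtx_insn *trial)
{
  rtx_insn *last = try_split (PATTERN (trial), trial, 1);
  if (last == trial)
    return NULL;        /* No splitter matched; the chain is unchanged.  */
  return last;          /* Last insn of the replacement sequence.  */
}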
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
\f
/* Link INSN into the doubly-linked insn chain between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and
   NOTE_P objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and
   NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
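
/* Illustrative sketch, not part of emit-rtl.c: the basic invariant
   link_insn_into_chain establishes for the outer chain (the SEQUENCE
   fixups above additionally keep delay-slot bodies consistent).
   Hypothetical checking helper, for exposition only.  */

static bool ATTRIBUTE_UNUSED
insn_chain_linked_p (rtx_insn *insn)
{
  rtx_insn *prev = PREV_INSN (insn);
  rtx_insn *next = NEXT_INSN (insn);
  /* Each neighbor, when present, must point back at INSN.  */
  return ((prev == NULL || NEXT_INSN (prev) == insn)
          && (next == NULL || PREV_INSN (next) == insn));
}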

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      if (get_last_insn () == after)
        set_last_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (after == stack->last)
              {
                stack->last = insn;
                break;
              }
        }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      if (get_insns () == before)
        set_first_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (before == stack->first)
              {
                stack->first = insn;
                break;
              }

          gcc_assert (stack);
        }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        SET_PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

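/* Illustrative sketch, not part of emit-rtl.c: the disconnect-and-
   re-emit workflow described in the comment on remove_insn above.
   The caller must nullify PREV_INSN and NEXT_INSN before re-linking,
   as that comment requires.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
move_insn_after_1 (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  /* Passing a NULL bb lets add_insn_after infer the block from AFTER.  */
  add_insn_after (insn, after, NULL);
}
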
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

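/* Illustrative sketch, not part of emit-rtl.c: the canonical
   start_sequence / get_insns / end_sequence idiom from the comment
   above, packaged as a hypothetical helper that emits a USE of REG
   before SPOT.  Name and purpose are assumptions for exposition.  */

static rtx_insn * ATTRIBUTE_UNUSED
emit_use_before_spot (rtx reg, rtx_insn *spot)
{
  rtx_insn *insns_head;

  start_sequence ();
  emit_insn (gen_rtx_USE (VOIDmode, reg));
  insns_head = get_insns ();
  end_sequence ();

  /* The detached list is spliced in front of SPOT in one call.  */
  return emit_insn_before (insns_head, spot);
}
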
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_insn *
emit_label_before (rtx label, rtx before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
        return true;

      /* Notes for var tracking and EH region markers can appear between or
         inside basic blocks.  If the caller is emitting on the basic block
         boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
        return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
        return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx uncast_after)
{
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx uncast_before)
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
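
/* Illustrative sketch, not part of emit-rtl.c: how the two paths in
   emit_note_after differ.  When AFTER is BB_END of its block, an EH
   region note is linked with add_insn_after_nobb and keeps a NULL
   BLOCK_FOR_INSN; in the middle of a block it goes through
   add_insn_after and gets the block set.  Hypothetical wrapper, for
   exposition only.  */

static rtx_note * ATTRIBUTE_UNUSED
emit_eh_region_beg_after (rtx_insn *after)
{
  return emit_note_after (NOTE_INSN_EH_REGION_BEG, after);
}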
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return safe_as_a <rtx_insn *> (last);

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return safe_as_a <rtx_insn *> (last);
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
                    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

cd459bf8 4706rtx_insn *
e8110d6f 4707emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4708{
e8110d6f
NF
4709 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4710}
2f937369 4711
5368224f 4712/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4713rtx_insn *
e8110d6f
NF
4714emit_insn_after (rtx pattern, rtx after)
4715{
4716 return emit_pattern_after (pattern, after, true, make_insn_raw);
4717}
dd3adcf8 4718
5368224f 4719/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4720rtx_insn *
e8110d6f
NF
4721emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4722{
4723 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4724}
4725
5368224f 4726/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4727rtx_insn *
a7102479
JH
4728emit_jump_insn_after (rtx pattern, rtx after)
4729{
e8110d6f 4730 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4731}
4732
5368224f 4733/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4734rtx_insn *
502b8322 4735emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4736{
e8110d6f 4737 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4738}
4739
5368224f 4740/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4741rtx_insn *
a7102479
JH
4742emit_call_insn_after (rtx pattern, rtx after)
4743{
e8110d6f 4744 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4745}
4746
5368224f 4747/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4748rtx_insn *
b5b8b0ac
AO
4749emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4750{
e8110d6f 4751 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4752}
4753
5368224f 4754/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4755rtx_insn *
b5b8b0ac
AO
4756emit_debug_insn_after (rtx pattern, rtx after)
4757{
e8110d6f 4758 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4759}
4760
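/* Illustrative sketch, not part of emit-rtl.c: the relation between
   the plain and _setloc variants above.  When AFTER is a nondebug
   insn satisfying INSN_P, emit_insn_after picks up AFTER's own
   location; this hypothetical helper makes that explicit.  */

static rtx_insn * ATTRIBUTE_UNUSED
emit_insn_after_same_loc (rtx pattern, rtx_insn *after)
{
  return emit_insn_after_setloc (pattern, after, INSN_LOCATION (after));
}
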
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
                            rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
                                              insnp ? before : NULL_RTX,
                                              NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
                     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}

4818
5368224f 4819/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4820rtx_insn *
e8110d6f 4821emit_insn_before_setloc (rtx pattern, rtx before, int loc)
a7102479 4822{
e8110d6f
NF
4823 return emit_pattern_before_setloc (pattern, before, loc, true,
4824 make_insn_raw);
4825}
a7102479 4826
5368224f 4827/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
cd459bf8 4828rtx_insn *
e8110d6f
NF
4829emit_insn_before (rtx pattern, rtx before)
4830{
4831 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4832}
a7102479 4833
5368224f 4834/* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4835rtx_insn *
e8110d6f
NF
4836emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4837{
4838 return emit_pattern_before_setloc (pattern, before, loc, false,
4839 make_jump_insn_raw);
a7102479
JH
4840}
4841
5368224f 4842/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
cd459bf8 4843rtx_insn *
a7102479
JH
4844emit_jump_insn_before (rtx pattern, rtx before)
4845{
e8110d6f
NF
4846 return emit_pattern_before (pattern, before, true, false,
4847 make_jump_insn_raw);
a7102479
JH
4848}
4849
5368224f 4850/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4851rtx_insn *
a7102479
JH
4852emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4853{
e8110d6f
NF
4854 return emit_pattern_before_setloc (pattern, before, loc, false,
4855 make_call_insn_raw);
0d682900 4856}
a7102479 4857
e8110d6f 4858/* Like emit_call_insn_before_noloc,
5368224f 4859 but set insn_location according to BEFORE. */
cd459bf8 4860rtx_insn *
a7102479
JH
4861emit_call_insn_before (rtx pattern, rtx before)
4862{
e8110d6f
NF
4863 return emit_pattern_before (pattern, before, true, false,
4864 make_call_insn_raw);
a7102479 4865}
b5b8b0ac 4866
5368224f 4867/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4868rtx_insn *
b5b8b0ac
AO
4869emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4870{
e8110d6f
NF
4871 return emit_pattern_before_setloc (pattern, before, loc, false,
4872 make_debug_insn_raw);
b5b8b0ac
AO
4873}
4874
e8110d6f 4875/* Like emit_debug_insn_before_noloc,
5368224f 4876 but set insn_location according to BEFORE. */
cd459bf8 4877rtx_insn *
b5b8b0ac
AO
4878emit_debug_insn_before (rtx pattern, rtx before)
4879{
e8110d6f
NF
4880 return emit_pattern_before (pattern, before, false, false,
4881 make_debug_insn_raw);
b5b8b0ac 4882}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
4979
2f937369
DM
4980/* Make an insn of code JUMP_INSN with pattern X
4981 and add it to the end of the doubly-linked list. */
23b2ce53 4982
cd459bf8 4983rtx_insn *
502b8322 4984emit_jump_insn (rtx x)
23b2ce53 4985{
cd459bf8
DM
4986 rtx_insn *last = NULL;
4987 rtx_insn *insn;
23b2ce53 4988
2f937369 4989 switch (GET_CODE (x))
23b2ce53 4990 {
b5b8b0ac 4991 case DEBUG_INSN:
2f937369
DM
4992 case INSN:
4993 case JUMP_INSN:
4994 case CALL_INSN:
4995 case CODE_LABEL:
4996 case BARRIER:
4997 case NOTE:
cd459bf8 4998 insn = as_a <rtx_insn *> (x);
2f937369
DM
4999 while (insn)
5000 {
cd459bf8 5001 rtx_insn *next = NEXT_INSN (insn);
2f937369
DM
5002 add_insn (insn);
5003 last = insn;
5004 insn = next;
5005 }
5006 break;
e0a5c5eb 5007
2f937369 5008#ifdef ENABLE_RTL_CHECKING
39718607 5009 case JUMP_TABLE_DATA:
2f937369 5010 case SEQUENCE:
5b0264cb 5011 gcc_unreachable ();
2f937369
DM
5012 break;
5013#endif
e0a5c5eb 5014
2f937369
DM
5015 default:
5016 last = make_jump_insn_raw (x);
5017 add_insn (last);
5018 break;
3c030e88 5019 }
e0a5c5eb
RS
5020
5021 return last;
5022}
5023
2f937369 5024/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
5025 and add it to the end of the doubly-linked list. */
5026
cd459bf8 5027rtx_insn *
502b8322 5028emit_call_insn (rtx x)
23b2ce53 5029{
cd459bf8 5030 rtx_insn *insn;
2f937369
DM
5031
5032 switch (GET_CODE (x))
23b2ce53 5033 {
b5b8b0ac 5034 case DEBUG_INSN:
2f937369
DM
5035 case INSN:
5036 case JUMP_INSN:
5037 case CALL_INSN:
5038 case CODE_LABEL:
5039 case BARRIER:
5040 case NOTE:
5041 insn = emit_insn (x);
5042 break;
23b2ce53 5043
2f937369
DM
5044#ifdef ENABLE_RTL_CHECKING
5045 case SEQUENCE:
39718607 5046 case JUMP_TABLE_DATA:
5b0264cb 5047 gcc_unreachable ();
2f937369
DM
5048 break;
5049#endif
23b2ce53 5050
2f937369
DM
5051 default:
5052 insn = make_call_insn_raw (x);
23b2ce53 5053 add_insn (insn);
2f937369 5054 break;
23b2ce53 5055 }
2f937369
DM
5056
5057 return insn;
23b2ce53
RS
5058}
5059
5060/* Add the label LABEL to the end of the doubly-linked list. */
5061
cd459bf8 5062rtx_insn *
502b8322 5063emit_label (rtx label)
23b2ce53 5064{
468660d3
SB
5065 gcc_checking_assert (INSN_UID (label) == 0);
5066 INSN_UID (label) = cur_insn_uid++;
9152e0aa 5067 add_insn (as_a <rtx_insn *> (label));
cd459bf8 5068 return as_a <rtx_insn *> (label);
23b2ce53
RS
5069}
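
/* Usage sketch (the caller below is hypothetical, not part of the
   historical file): a label is created detached with gen_label_rtx
   and only receives its insn uid once emit_label places it in the
   chain, which is what the assertion above enforces.  */

static rtx_insn * ATTRIBUTE_UNUSED
emit_fresh_label_sketch (void)
{
  rtx label = gen_label_rtx ();	/* detached CODE_LABEL, INSN_UID still 0 */
  return emit_label (label);	/* assigns the uid and chains the label */
}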

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
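
/* Sketch of the idiom that the emit () dispatcher later in this file
   automates: a hand-emitted unconditional jump must be followed by a
   barrier so later passes know control cannot fall through.  gen_jump
   is the standard target-provided jump expander; the caller here is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
emit_jump_and_barrier_sketch (rtx label)
{
  rtx_insn *jump = emit_jump_insn (gen_jump (label));
  JUMP_LABEL (jump) = label;	/* record the jump target */
  emit_barrier ();		/* no fallthrough past this point */
}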

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
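
/* Sketch (hypothetical caller): gen_clobber and gen_use hand back a
   detached insn chain, which is typically spliced into the main
   stream with one of the emit_*_after entry points.  */

static void ATTRIBUTE_UNUSED
splice_clobber_sketch (rtx reg, rtx_insn *after)
{
  rtx_insn *seq = gen_clobber (reg);	/* detached CLOBBER insn(s) */
  emit_insn_after (seq, after);		/* splice them in after AFTER */
}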

/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
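
/* Usage sketch (hypothetical caller): record that INSN computes the
   constant 42 so later passes may substitute it.  set_unique_reg_note
   quietly refuses when the note would be meaningless: no single SET,
   an ASM_OPERANDS source, or a datum with side effects.  */

static void ATTRIBUTE_UNUSED
record_reg_equal_sketch (rtx insn)
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}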
\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
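
/* A minimal sketch of the dispatch above (the hard registers 0 and 1
   are hypothetical stand-ins): a plain SET whose destination is not
   pc_rtx classifies as INSN, so emit () routes it through emit_insn.  */

static void ATTRIBUTE_UNUSED
emit_dispatch_sketch (void)
{
  rtx dest = gen_rtx_REG (word_mode, 0);	/* hypothetical regno 0 */
  rtx src = gen_rtx_REG (word_mode, 1);		/* hypothetical regno 1 */
  emit (gen_rtx_SET (VOIDmode, dest, src));
}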
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
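
/* Sketch (hypothetical caller): emit PAT on the function's outermost
   insn chain from inside arbitrarily nested sequences, the use case
   push_topmost_sequence/pop_topmost_sequence exist for.  */

static void ATTRIBUTE_UNUSED
emit_at_top_level_sketch (rtx pat)
{
  push_topmost_sequence ();
  emit_insn (pat);
  pop_topmost_sequence ();
}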

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
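
/* The canonical capture pattern for the sequence stack, as used by
   gen_clobber and gen_use above (the SET emitted here is an arbitrary
   illustration): insns emitted between start_sequence and end_sequence
   are collected instead of extending the main chain, and get_insns
   must be read before end_sequence restores the saved state.  */

static rtx_insn * ATTRIBUTE_UNUSED
build_detached_seq_sketch (rtx dest, rtx src)
{
  rtx_insn *seq;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  seq = get_insns ();	/* read the captured chain first */
  end_sequence ();	/* then pop the sequence stack */
  return seq;
}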
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
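
/* Sketch of the note-copying use case the comment on copy_insn_1
   describes (NEW_INSN is a hypothetical already-allocated insn):
   copying the notes through copy_insn_1 rather than copy_rtx reuses
   the SCRATCH pairings recorded by the copy_insn call, so a SCRATCH
   mentioned in both the pattern and the notes stays one object.  */

static void ATTRIBUTE_UNUSED
copy_pattern_and_notes_sketch (rtx insn, rtx new_insn)
{
  PATTERN (new_insn) = copy_insn (PATTERN (insn));
  REG_NOTES (new_insn) = copy_insn_1 (REG_NOTES (insn));
}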

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, the one vector when all elements are one,
   and the minus-one vector when all elements are minus one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
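
/* Sketch (assumes a target that defines V4SImode): a vector whose
   elements are all const0_rtx folds to the shared CONST0_RTX for the
   vector mode, so callers can compare against CONST0_RTX by pointer.  */

static rtx ATTRIBUTE_UNUSED
zero_vector_sketch (void)
{
  rtvec v = rtvec_alloc (4);
  for (int i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  return gen_rtx_CONST_VECTOR (V4SImode, v);	/* == CONST0_RTX (V4SImode) */
}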

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
					 const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
	FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
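
/* Sketch: because the table above is a GTY((deletable)) cache,
   repeated requests for the same (mode, regno) pair return the same
   shared rtx (regno 0 here is an arbitrary illustration).  */

static void ATTRIBUTE_UNUSED
hard_reg_clobber_sharing_sketch (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (a == b);	/* pointer-equal, not merely rtx_equal_p */
}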

location_t prologue_location;
location_t epilogue_location;

/* Hold the current location, so that the location data structures are
   built lazily, only when instructions at a given place are actually
   needed.  */
static location_t curr_location;

/* Allocate the insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const_rtx insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
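
/* Usage sketch in the style of the targets the comment above refers
   to (the helper and its callers are hypothetical): bracket an atomic
   store with whatever fences MODEL demands.  expand_mem_thread_fence
   is the optabs helper that emits the fence itself.  */

static void ATTRIBUTE_UNUSED
expand_atomic_store_sketch (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))	/* release-style, before */
    expand_mem_thread_fence (model);
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, false))	/* acquire-style, after */
    expand_mem_thread_fence (model);
}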
\f
#include "gt-emit-rtl.h"