/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not able
   to deal with a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for a register with decl DECL and
   offset OFFSET, and insert it into the hash table if one identical to
   it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
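
/* A typical use is as a scheduling barrier: a caller emits gen_blockage ()
   so that instructions and register equivalences cannot be moved or
   extended across that point (a general note; the exact callers are
   target- and pass-dependent).  */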


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
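
/* Because CONST_INTs are shared, equal values are pointer-equal: e.g.
   gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx, so two CONST_INTs can
   be compared with == rather than by inspecting INTVAL.  */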

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
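
/* For example, assuming QImode is 8 bits wide, gen_int_mode (0xff, QImode)
   truncates and sign-extends the value to the mode, yielding constm1_rtx,
   while gen_int_mode (0x100, QImode) yields const0_rtx.  */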

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
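
/* E.g. with a 64-bit HOST_WIDE_INT, immed_wide_int_const of the value 1
   in SImode fits in a single element and comes back as const1_rtx via
   gen_int_mode; only values wider than HOST_BITS_PER_WIDE_INT take the
   CONST_WIDE_INT (or CONST_DOUBLE) path.  */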

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
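
/* As an illustration of the cases above: immed_double_const (-1, -1,
   VOIDmode) fits in one word (case 2) and yields constm1_rtx, whereas
   immed_double_const (0, 1, VOIDmode) needs the high word and yields a
   shared CONST_DOUBLE.  */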

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
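
/* References to the constant pool, for instance, are built this way;
   the MEM_READONLY_P and MEM_NOTRAP_P flags let later passes CSE and
   move such a MEM freely.  */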

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
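
/* For instance, on a 32-bit little-endian target, (subreg:QI (reg:SI) 0)
   passes these checks, (subreg:QI (reg:SI) 1) fails the lowpart
   requirement, and (subreg:HI (reg:DF) 0) is rejected because it would
   change the size of a floating point value (outside of LRA).  */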

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
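
/* E.g. on a fully big-endian 32-bit target, byte_lowpart_offset (QImode,
   SImode) is 3, the in-memory position of the least significant byte;
   with the modes swapped the lowpart is paradoxical and the result is
   -3.  */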
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
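
/* Note the complex-mode case above: while generating_concat_p is set,
   asking for a DCmode pseudo yields (concat:DC (reg:DF) (reg:DF)) built
   from two independent DFmode pseudos.  */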

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
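
/* E.g. gen_lowpart_common (QImode, (zero_extend:SI (reg:QI r))) returns
   (reg:QI r) itself, since the low byte of the extension is just the
   object that was extended.  */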
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP in case EXP can be a
   VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
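
/* For example, with 4-byte words, subreg_lowpart_offset (QImode, DImode)
   is 0 on a little-endian target and 4 + 3 = 7 on a fully big-endian
   one, the byte address of the least significant byte.  */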

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
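
/* Mirroring the lowpart case, subreg_highpart_offset (SImode, DImode)
   is 4 on a little-endian target with 4-byte words and 0 on a
   big-endian one.  */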

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
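
/* Thus (subreg:QI (reg:SI) 0) satisfies this predicate on a little-endian
   target, while on a big-endian one the lowpart byte is instead
   (subreg:QI (reg:SI) 3).  */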

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
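
/* E.g. on a 32-bit target, operand_subword (op, 1, 1, DImode) extracts
   the second word of a DImode operand: the most significant half when
   not WORDS_BIG_ENDIAN, the least significant half otherwise.  */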

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
1702
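
/* Illustrative sketch (hypothetical helper): get_mem_align_offset
   answers "how far past an ALIGN-bit boundary does MEM start?", so a
   zero result for ALIGN == 32 means the address is known to be a
   multiple of 4 bytes, while -1 means the answer is unknown.  */

static bool
example_known_32bit_aligned_p (rtx mem)
{
  return get_mem_align_offset (mem, 32) == 0;
}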
6926c713 1703/* Given REF (a MEM) and T, either the type of REF or the expression
173b24b9 1704 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1705 if we are making a new object of this type. BITPOS is nonzero if
1706 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1707
1708void
502b8322
AJ
1709set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1710 HOST_WIDE_INT bitpos)
173b24b9 1711{
6f1087be 1712 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1713 tree type;
f12144dd 1714 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1715 addr_space_t as;
173b24b9
RK
1716
1717 /* It can happen that type_for_mode was given a mode for which there
1718 is no language-level type, in which case it returns NULL, which
1719 we can see here. */
1720 if (t == NULL_TREE)
1721 return;
1722
1723 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1724 if (type == error_mark_node)
1725 return;
173b24b9 1726
173b24b9
RK
1727 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1728 wrong answer, as it assumes that DECL_RTL already has the right alias
1729 info. Callers should not set DECL_RTL until after the call to
1730 set_mem_attributes. */
5b0264cb 1731 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1732
f12144dd
RS
1733 memset (&attrs, 0, sizeof (attrs));
1734
738cc472 1735 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1736 front-end routine) and use it. */
f12144dd 1737 attrs.alias = get_alias_set (t);
173b24b9 1738
a5e9c810 1739 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1740 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1741
268f7033 1742 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1743 refattrs = MEM_ATTRS (ref);
1744 if (refattrs)
268f7033
UW
1745 {
1746 /* ??? Can this ever happen? Calling this routine on a MEM that
1747 already carries memory attributes should probably be invalid. */
f12144dd 1748 attrs.expr = refattrs->expr;
754c3d5d 1749 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1750 attrs.offset = refattrs->offset;
754c3d5d 1751 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1752 attrs.size = refattrs->size;
1753 attrs.align = refattrs->align;
268f7033
UW
1754 }
1755
1756 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1757 else
268f7033 1758 {
f12144dd
RS
1759 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1760 gcc_assert (!defattrs->expr);
754c3d5d 1761 gcc_assert (!defattrs->offset_known_p);
f12144dd 1762
268f7033 1763 /* Respect mode size. */
754c3d5d 1764 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1765 attrs.size = defattrs->size;
268f7033
UW
1766 /* ??? Is this really necessary? We probably should always get
1767 the size from the type below. */
1768
1769 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1770 if T is an object, always compute the object alignment below. */
f12144dd
RS
1771 if (TYPE_P (t))
1772 attrs.align = defattrs->align;
1773 else
1774 attrs.align = BITS_PER_UNIT;
268f7033
UW
1775 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1776 e.g. if the type carries an alignment attribute. Should we be
1777 able to simply always use TYPE_ALIGN? */
1778 }
1779
c3d32120
RK
1780 /* We can set the alignment from the type if we are making an object, if
1781 this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
a80903ff 1782 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1783 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1784
738cc472 1785 /* If the size is known, we can set that. */
a787ccc3 1786 tree new_size = TYPE_SIZE_UNIT (type);
738cc472 1787
30b0317c
RB
1788 /* The address-space is that of the type. */
1789 as = TYPE_ADDR_SPACE (type);
1790
80965c18
RK
1791 /* If T is not a type, we may be able to deduce some more information about
1792 the expression. */
1793 if (! TYPE_P (t))
8ac61af7 1794 {
8476af98 1795 tree base;
389fdba0 1796
8ac61af7
RK
1797 if (TREE_THIS_VOLATILE (t))
1798 MEM_VOLATILE_P (ref) = 1;
173b24b9 1799
c56e3582
RK
1800 /* Now remove any conversions: they don't change what the underlying
1801 object is. Likewise for SAVE_EXPR. */
1043771b 1802 while (CONVERT_EXPR_P (t)
c56e3582
RK
1803 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1804 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1805 t = TREE_OPERAND (t, 0);
1806
4994da65
RG
1807 /* Note whether this expression can trap. */
1808 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1809
1810 base = get_base_address (t);
f18a7b25
MJ
1811 if (base)
1812 {
1813 if (DECL_P (base)
1814 && TREE_READONLY (base)
1815 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1816 && !TREE_THIS_VOLATILE (base))
1817 MEM_READONLY_P (ref) = 1;
1818
1819 /* Mark static const strings readonly as well. */
1820 if (TREE_CODE (base) == STRING_CST
1821 && TREE_READONLY (base)
1822 && TREE_STATIC (base))
1823 MEM_READONLY_P (ref) = 1;
1824
30b0317c 1825 /* Address-space information is on the base object. */
f18a7b25
MJ
1826 if (TREE_CODE (base) == MEM_REF
1827 || TREE_CODE (base) == TARGET_MEM_REF)
1828 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1829 0))));
1830 else
1831 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1832 }
ba30e50d 1833
2039d7aa
RH
1834 /* If this expression uses its parent's alias set, mark it such
1835 that we won't change it. */
b4ada065 1836 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
10b76d73
RK
1837 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1838
8ac61af7
RK
1839 /* If this is a decl, set the attributes of the MEM from it. */
1840 if (DECL_P (t))
1841 {
f12144dd 1842 attrs.expr = t;
754c3d5d
RS
1843 attrs.offset_known_p = true;
1844 attrs.offset = 0;
6f1087be 1845 apply_bitpos = bitpos;
a787ccc3 1846 new_size = DECL_SIZE_UNIT (t);
8ac61af7
RK
1847 }
1848
30b0317c 1849 /* ??? If we end up with a constant here do record a MEM_EXPR. */
6615c446 1850 else if (CONSTANT_CLASS_P (t))
30b0317c 1851 ;
998d7deb 1852
a787ccc3
RS
1853 /* If this is a field reference, record it. */
1854 else if (TREE_CODE (t) == COMPONENT_REF)
998d7deb 1855 {
f12144dd 1856 attrs.expr = t;
754c3d5d
RS
1857 attrs.offset_known_p = true;
1858 attrs.offset = 0;
6f1087be 1859 apply_bitpos = bitpos;
a787ccc3
RS
1860 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1861 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
998d7deb
RH
1862 }
1863
1864 /* If this is an array reference, look for an outer field reference. */
1865 else if (TREE_CODE (t) == ARRAY_REF)
1866 {
1867 tree off_tree = size_zero_node;
1b1838b6
JW
1868 /* We can't modify t, because we use it at the end of the
1869 function. */
1870 tree t2 = t;
998d7deb
RH
1871
1872 do
1873 {
1b1838b6 1874 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1875 tree low_bound = array_ref_low_bound (t2);
1876 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1877
1878 /* We assume all arrays have sizes that are a multiple of a byte.
1879 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1880 index, then convert to sizetype and multiply by the size of
1881 the array element. */
1882 if (! integer_zerop (low_bound))
4845b383
KH
1883 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1884 index, low_bound);
2567406a 1885
44de5aeb 1886 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1887 size_binop (MULT_EXPR,
1888 fold_convert (sizetype,
1889 index),
44de5aeb
RK
1890 unit_size),
1891 off_tree);
1b1838b6 1892 t2 = TREE_OPERAND (t2, 0);
998d7deb 1893 }
1b1838b6 1894 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1895
30b0317c
RB
1896 if (DECL_P (t2)
1897 || TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1898 {
f12144dd 1899 attrs.expr = t2;
754c3d5d 1900 attrs.offset_known_p = false;
cc269bb6 1901 if (tree_fits_uhwi_p (off_tree))
6f1087be 1902 {
754c3d5d 1903 attrs.offset_known_p = true;
ae7e9ddd 1904 attrs.offset = tree_to_uhwi (off_tree);
6f1087be
RH
1905 apply_bitpos = bitpos;
1906 }
998d7deb 1907 }
30b0317c 1908 /* Else do not record a MEM_EXPR. */
c67a1cf6
RH
1909 }
1910
56c47f22 1911 /* If this is an indirect reference, record it. */
70f34814 1912 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1913 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1914 {
f12144dd 1915 attrs.expr = t;
754c3d5d
RS
1916 attrs.offset_known_p = true;
1917 attrs.offset = 0;
56c47f22
RG
1918 apply_bitpos = bitpos;
1919 }
1920
30b0317c
RB
1921 /* Compute the alignment. */
1922 unsigned int obj_align;
1923 unsigned HOST_WIDE_INT obj_bitpos;
1924 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1925 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1926 if (obj_bitpos != 0)
1927 obj_align = (obj_bitpos & -obj_bitpos);
1928 attrs.align = MAX (attrs.align, obj_align);
8ac61af7
RK
1929 }
1930
cc269bb6 1931 if (tree_fits_uhwi_p (new_size))
a787ccc3
RS
1932 {
1933 attrs.size_known_p = true;
ae7e9ddd 1934 attrs.size = tree_to_uhwi (new_size);
a787ccc3
RS
1935 }
1936
15c812e3 1937 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1938 bit position offset. Similarly, increase the size of the accessed
1939 object to contain the negative offset. */
6f1087be 1940 if (apply_bitpos)
8c317c5f 1941 {
754c3d5d
RS
1942 gcc_assert (attrs.offset_known_p);
1943 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1944 if (attrs.size_known_p)
1945 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1946 }
6f1087be 1947
8ac61af7 1948 /* Now set the attributes we computed above. */
f18a7b25 1949 attrs.addrspace = as;
f12144dd 1950 set_mem_attrs (ref, &attrs);
173b24b9
RK
1951}
1952
6f1087be 1953void
502b8322 1954set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1955{
1956 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1957}
1958
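
/* Illustrative sketch (hypothetical helper): the usual pattern when
   materializing a MEM for a tree object EXP whose address ADDR has
   already been computed; set_mem_attributes derives the alias set,
   alignment, size and MEM_EXPR from the tree.  Assumes EXP has a
   non-BLKmode type.  */

static rtx
example_mem_for_object (tree exp, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
  /* EXP is the object itself, so OBJECTP is nonzero.  */
  set_mem_attributes (mem, exp, 1);
  return mem;
}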
173b24b9
RK
1959/* Set the alias set of MEM to SET. */
1960
1961void
4862826d 1962set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1963{
f12144dd
RS
1964 struct mem_attrs attrs;
1965
173b24b9 1966 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1967 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
1968 attrs = *get_mem_attrs (mem);
1969 attrs.alias = set;
1970 set_mem_attrs (mem, &attrs);
09e881c9
BE
1971}
1972
1973/* Set the address space of MEM to ADDRSPACE (target-defined). */
1974
1975void
1976set_mem_addr_space (rtx mem, addr_space_t addrspace)
1977{
f12144dd
RS
1978 struct mem_attrs attrs;
1979
1980 attrs = *get_mem_attrs (mem);
1981 attrs.addrspace = addrspace;
1982 set_mem_attrs (mem, &attrs);
173b24b9 1983}
738cc472 1984
d022d93e 1985/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1986
1987void
502b8322 1988set_mem_align (rtx mem, unsigned int align)
738cc472 1989{
f12144dd
RS
1990 struct mem_attrs attrs;
1991
1992 attrs = *get_mem_attrs (mem);
1993 attrs.align = align;
1994 set_mem_attrs (mem, &attrs);
738cc472 1995}
1285011e 1996
998d7deb 1997/* Set the expr for MEM to EXPR. */
1285011e
RK
1998
1999void
502b8322 2000set_mem_expr (rtx mem, tree expr)
1285011e 2001{
f12144dd
RS
2002 struct mem_attrs attrs;
2003
2004 attrs = *get_mem_attrs (mem);
2005 attrs.expr = expr;
2006 set_mem_attrs (mem, &attrs);
1285011e 2007}
998d7deb
RH
2008
2009/* Set the offset of MEM to OFFSET. */
2010
2011void
527210c4 2012set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 2013{
f12144dd
RS
2014 struct mem_attrs attrs;
2015
2016 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2017 attrs.offset_known_p = true;
2018 attrs.offset = offset;
527210c4
RS
2019 set_mem_attrs (mem, &attrs);
2020}
2021
2022/* Clear the offset of MEM. */
2023
2024void
2025clear_mem_offset (rtx mem)
2026{
2027 struct mem_attrs attrs;
2028
2029 attrs = *get_mem_attrs (mem);
754c3d5d 2030 attrs.offset_known_p = false;
f12144dd 2031 set_mem_attrs (mem, &attrs);
35aff10b
AM
2032}
2033
2034/* Set the size of MEM to SIZE. */
2035
2036void
f5541398 2037set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 2038{
f12144dd
RS
2039 struct mem_attrs attrs;
2040
2041 attrs = *get_mem_attrs (mem);
754c3d5d
RS
2042 attrs.size_known_p = true;
2043 attrs.size = size;
f5541398
RS
2044 set_mem_attrs (mem, &attrs);
2045}
2046
2047/* Clear the size of MEM. */
2048
2049void
2050clear_mem_size (rtx mem)
2051{
2052 struct mem_attrs attrs;
2053
2054 attrs = *get_mem_attrs (mem);
754c3d5d 2055 attrs.size_known_p = false;
f12144dd 2056 set_mem_attrs (mem, &attrs);
998d7deb 2057}
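
/* Illustrative sketch (hypothetical helper): once a pass has proved
   that MEM is an 8-byte access starting on a 16-byte boundary, it can
   record that through the setters above.  Note the units: alignment
   is given in bits, size and offset in bytes.  */

static void
example_annotate_mem (rtx mem)
{
  set_mem_align (mem, 128);   /* 16-byte alignment, expressed in bits.  */
  set_mem_size (mem, 8);      /* The access covers 8 bytes.  */
  set_mem_offset (mem, 0);    /* Known offset within MEM_EXPR.  */
}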
173b24b9 2058\f
738cc472
RK
2059/* Return a memory reference like MEMREF, but with its mode changed to MODE
2060 and its address changed to ADDR. (VOIDmode means don't change the mode.
2061 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
23b33725
RS
2062 returned memory location is required to be valid. INPLACE is true if any
2063 changes can be made directly to MEMREF or false if MEMREF must be treated
2064 as immutable.
2065
2066 The memory attributes are not changed. */
23b2ce53 2067
738cc472 2068static rtx
23b33725
RS
2069change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
2070 bool inplace)
23b2ce53 2071{
09e881c9 2072 addr_space_t as;
60564289 2073 rtx new_rtx;
23b2ce53 2074
5b0264cb 2075 gcc_assert (MEM_P (memref));
09e881c9 2076 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
2077 if (mode == VOIDmode)
2078 mode = GET_MODE (memref);
2079 if (addr == 0)
2080 addr = XEXP (memref, 0);
a74ff877 2081 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2082 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2083 return memref;
23b2ce53 2084
91c5ee5b
VM
2085 /* Don't validate the address for LRA. LRA can make the address valid
2086 by itself in the most efficient way. */
2087 if (validate && !lra_in_progress)
23b2ce53 2088 {
f1ec5147 2089 if (reload_in_progress || reload_completed)
09e881c9 2090 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2091 else
09e881c9 2092 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2093 }
750c9258 2094
9b04c6a8
RK
2095 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2096 return memref;
2097
23b33725
RS
2098 if (inplace)
2099 {
2100 XEXP (memref, 0) = addr;
2101 return memref;
2102 }
2103
60564289
KG
2104 new_rtx = gen_rtx_MEM (mode, addr);
2105 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2106 return new_rtx;
23b2ce53 2107}
792760b9 2108
738cc472
RK
2109/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2110 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2111
2112rtx
502b8322 2113change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 2114{
23b33725 2115 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
60564289 2116 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2117 struct mem_attrs attrs, *defattrs;
4e44c1ef 2118
f12144dd
RS
2119 attrs = *get_mem_attrs (memref);
2120 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2121 attrs.expr = NULL_TREE;
2122 attrs.offset_known_p = false;
2123 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2124 attrs.size = defattrs->size;
2125 attrs.align = defattrs->align;
c2f7bcc3 2126
fdb1c7b3 2127 /* If there are no changes, just return the original memory reference. */
60564289 2128 if (new_rtx == memref)
4e44c1ef 2129 {
f12144dd 2130 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2131 return new_rtx;
4e44c1ef 2132
60564289
KG
2133 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2134 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2135 }
fdb1c7b3 2136
f12144dd 2137 set_mem_attrs (new_rtx, &attrs);
60564289 2138 return new_rtx;
f4ef873c 2139}
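
/* Illustrative sketch (hypothetical helper): force the address of MEM
   into a fresh pseudo and rebuild the reference with change_address.
   VOIDmode keeps the original mode; as documented above, only the
   alias set is preserved among the attributes.  */

static rtx
example_mem_via_reg (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  return change_address (mem, VOIDmode, addr);
}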
792760b9 2140
738cc472
RK
2141/* Return a memory reference like MEMREF, but with its mode changed
2142 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6 2143 nonzero, the memory address is forced to be valid.
5ef0b50d
EB
2144 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2145 and the caller is responsible for adjusting MEMREF base register.
2146 If ADJUST_OBJECT is zero, the underlying object associated with the
2147 memory reference is left unchanged and the caller is responsible for
2148 dealing with it. Otherwise, if the new memory reference is outside
5f2cbd0d
RS
2149 the underlying object, even partially, then the object is dropped.
2150 SIZE, if nonzero, is the size of an access in cases where MODE
2151 has no inherent size. */
f1ec5147
RK
2152
2153rtx
502b8322 2154adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
5f2cbd0d
RS
2155 int validate, int adjust_address, int adjust_object,
2156 HOST_WIDE_INT size)
f1ec5147 2157{
823e3574 2158 rtx addr = XEXP (memref, 0);
60564289 2159 rtx new_rtx;
f12144dd 2160 enum machine_mode address_mode;
a6fe9ed4 2161 int pbits;
0207fa90 2162 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
f12144dd 2163 unsigned HOST_WIDE_INT max_align;
0207fa90
EB
2164#ifdef POINTERS_EXTEND_UNSIGNED
2165 enum machine_mode pointer_mode
2166 = targetm.addr_space.pointer_mode (attrs.addrspace);
2167#endif
823e3574 2168
ee88e690
EB
2169 /* VOIDmode means no mode change for change_address_1. */
2170 if (mode == VOIDmode)
2171 mode = GET_MODE (memref);
2172
5f2cbd0d
RS
2173 /* Take the size of non-BLKmode accesses from the mode. */
2174 defattrs = mode_mem_attrs[(int) mode];
2175 if (defattrs->size_known_p)
2176 size = defattrs->size;
2177
fdb1c7b3
JH
2178 /* If there are no changes, just return the original memory reference. */
2179 if (mode == GET_MODE (memref) && !offset
5f2cbd0d 2180 && (size == 0 || (attrs.size_known_p && attrs.size == size))
f12144dd
RS
2181 && (!validate || memory_address_addr_space_p (mode, addr,
2182 attrs.addrspace)))
fdb1c7b3
JH
2183 return memref;
2184
d14419e4 2185 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2186 This may happen even if offset is nonzero -- consider
d14419e4
RH
2187 (plus (plus reg reg) const_int) -- so do this always. */
2188 addr = copy_rtx (addr);
2189
a6fe9ed4
JM
2190 /* Convert a possibly large offset to a signed value within the
2191 range of the target address space. */
372d6395 2192 address_mode = get_address_mode (memref);
d4ebfa65 2193 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2194 if (HOST_BITS_PER_WIDE_INT > pbits)
2195 {
2196 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2197 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2198 >> shift);
2199 }
2200
5ef0b50d 2201 if (adjust_address)
4a78c787
RH
2202 {
2203 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2204 object, we can merge it into the LO_SUM. */
2205 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2206 && offset >= 0
2207 && (unsigned HOST_WIDE_INT) offset
2208 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2209 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2210 plus_constant (address_mode,
2211 XEXP (addr, 1), offset));
0207fa90
EB
2212#ifdef POINTERS_EXTEND_UNSIGNED
2213 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2214 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2215 the fact that pointers are not allowed to overflow. */
2216 else if (POINTERS_EXTEND_UNSIGNED > 0
2217 && GET_CODE (addr) == ZERO_EXTEND
2218 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2219 && trunc_int_for_mode (offset, pointer_mode) == offset)
2220 addr = gen_rtx_ZERO_EXTEND (address_mode,
2221 plus_constant (pointer_mode,
2222 XEXP (addr, 0), offset));
2223#endif
4a78c787 2224 else
0a81f074 2225 addr = plus_constant (address_mode, addr, offset);
4a78c787 2226 }
823e3574 2227
23b33725 2228 new_rtx = change_address_1 (memref, mode, addr, validate, false);
738cc472 2229
09efeca1
PB
2230 /* If the address is a REG, change_address_1 rightfully returns memref,
2231 but this would destroy memref's MEM_ATTRS. */
2232 if (new_rtx == memref && offset != 0)
2233 new_rtx = copy_rtx (new_rtx);
2234
5ef0b50d
EB
2235 /* Conservatively drop the object if we don't know where we start from. */
2236 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2237 {
2238 attrs.expr = NULL_TREE;
2239 attrs.alias = 0;
2240 }
2241
738cc472
RK
2242 /* Compute the new values of the memory attributes due to this adjustment.
2243 We add the offsets and update the alignment. */
754c3d5d 2244 if (attrs.offset_known_p)
5ef0b50d
EB
2245 {
2246 attrs.offset += offset;
2247
2248 /* Drop the object if the new left end is not within its bounds. */
2249 if (adjust_object && attrs.offset < 0)
2250 {
2251 attrs.expr = NULL_TREE;
2252 attrs.alias = 0;
2253 }
2254 }
738cc472 2255
03bf2c23
RK
2256 /* Compute the new alignment by taking the MIN of the alignment and the
2257 lowest-order set bit in OFFSET, but don't change the alignment if
2258 OFFSET is zero. */
2259 if (offset != 0)
f12144dd
RS
2260 {
2261 max_align = (offset & -offset) * BITS_PER_UNIT;
2262 attrs.align = MIN (attrs.align, max_align);
2263 }
738cc472 2264
5f2cbd0d 2265 if (size)
754c3d5d 2266 {
5ef0b50d 2267 /* Drop the object if the new right end is not within its bounds. */
5f2cbd0d 2268 if (adjust_object && (offset + size) > attrs.size)
5ef0b50d
EB
2269 {
2270 attrs.expr = NULL_TREE;
2271 attrs.alias = 0;
2272 }
754c3d5d 2273 attrs.size_known_p = true;
5f2cbd0d 2274 attrs.size = size;
754c3d5d
RS
2275 }
2276 else if (attrs.size_known_p)
5ef0b50d 2277 {
5f2cbd0d 2278 gcc_assert (!adjust_object);
5ef0b50d 2279 attrs.size -= offset;
5f2cbd0d
RS
2280 /* ??? The store_by_pieces machinery generates negative sizes,
2281 so don't assert for that here. */
5ef0b50d 2282 }
10b76d73 2283
f12144dd 2284 set_mem_attrs (new_rtx, &attrs);
738cc472 2285
60564289 2286 return new_rtx;
f1ec5147
RK
2287}
2288
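
/* Illustrative sketch (hypothetical helper): pull the upper halfword
   out of a 32-bit memory reference.  adjust_address is the usual
   wrapper around adjust_address_1; it validates the new address and
   updates the recorded offset, size and alignment.  The "upper"
   naming assumes a little-endian byte layout.  */

static rtx
example_upper_halfword (rtx simode_mem)
{
  return adjust_address (simode_mem, HImode, 2);
}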
630036c6
JJ
2289/* Return a memory reference like MEMREF, but with its mode changed
2290 to MODE and its address changed to ADDR, which is assumed to be
2291 MEMREF offset by OFFSET bytes. If VALIDATE is nonzero, the memory
2292 address is forced to be valid. */
2293
2294rtx
502b8322
AJ
2295adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2296 HOST_WIDE_INT offset, int validate)
630036c6 2297{
23b33725 2298 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
5f2cbd0d 2299 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
630036c6
JJ
2300}
2301
8ac61af7
RK
2302/* Return a memory reference like MEMREF, but whose address is changed by
2303 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2304 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2305
2306rtx
502b8322 2307offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2308{
60564289 2309 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2310 enum machine_mode address_mode;
754c3d5d 2311 struct mem_attrs attrs, *defattrs;
e3c8ea67 2312
f12144dd 2313 attrs = *get_mem_attrs (memref);
372d6395 2314 address_mode = get_address_mode (memref);
d4ebfa65 2315 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2316
68252e27 2317 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2318 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2319
2320 However, if we did go and rearrange things, we can wind up not
2321 being able to recognize the magic around pic_offset_table_rtx.
2322 This stuff is fragile, and is yet another example of why it is
2323 bad to expose PIC machinery too early. */
f12144dd
RS
2324 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2325 attrs.addrspace)
e3c8ea67
RH
2326 && GET_CODE (addr) == PLUS
2327 && XEXP (addr, 0) == pic_offset_table_rtx)
2328 {
2329 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2330 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2331 }
2332
60564289 2333 update_temp_slot_address (XEXP (memref, 0), new_rtx);
23b33725 2334 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
0d4903b8 2335
fdb1c7b3 2336 /* If there are no changes, just return the original memory reference. */
60564289
KG
2337 if (new_rtx == memref)
2338 return new_rtx;
fdb1c7b3 2339
0d4903b8
RK
2340 /* Update the alignment to reflect the offset. Reset the offset, which
2341 we don't know. */
754c3d5d
RS
2342 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2343 attrs.offset_known_p = false;
2344 attrs.size_known_p = defattrs->size_known_p;
2345 attrs.size = defattrs->size;
f12144dd
RS
2346 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2347 set_mem_attrs (new_rtx, &attrs);
60564289 2348 return new_rtx;
0d4903b8 2349}
68252e27 2350
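
/* Illustrative sketch (hypothetical helper): rebuild MEM at a variable
   byte offset held in OFFSET_REG.  POW2 is 4 here on the assumption
   that the offset is known to be a multiple of 4 bytes, which lets
   offset_address keep 32-bit alignment information in the result.  */

static rtx
example_indexed_mem (rtx mem, rtx offset_reg)
{
  return offset_address (mem, offset_reg, 4);
}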
792760b9
RK
2351/* Return a memory reference like MEMREF, but with its address changed to
2352 ADDR. The caller is asserting that the actual piece of memory pointed
2353 to is the same, just the form of the address is being changed, such as
23b33725
RS
2354 by putting something into a register. INPLACE is true if any changes
2355 can be made directly to MEMREF or false if MEMREF must be treated as
2356 immutable. */
792760b9
RK
2357
2358rtx
23b33725 2359replace_equiv_address (rtx memref, rtx addr, bool inplace)
792760b9 2360{
738cc472
RK
2361 /* change_address_1 copies the memory attribute structure without change
2362 and that's exactly what we want here. */
40c0668b 2363 update_temp_slot_address (XEXP (memref, 0), addr);
23b33725 2364 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
792760b9 2365}
738cc472 2366
f1ec5147
RK
2367/* Likewise, but the reference is not required to be valid. */
2368
2369rtx
23b33725 2370replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
f1ec5147 2371{
23b33725 2372 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
f1ec5147 2373}
e7dfe4bb
RH
2374
2375/* Return a memory reference like MEMREF, but with its mode widened to
2376 MODE and offset by OFFSET. This would be used by targets that e.g.
2377 cannot issue QImode memory operations and have to use SImode memory
2378 operations plus masking logic. */
2379
2380rtx
502b8322 2381widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2382{
5f2cbd0d 2383 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
f12144dd 2384 struct mem_attrs attrs;
e7dfe4bb
RH
2385 unsigned int size = GET_MODE_SIZE (mode);
2386
fdb1c7b3 2387 /* If there are no changes, just return the original memory reference. */
60564289
KG
2388 if (new_rtx == memref)
2389 return new_rtx;
fdb1c7b3 2390
f12144dd
RS
2391 attrs = *get_mem_attrs (new_rtx);
2392
e7dfe4bb
RH
2393 /* If we don't know what offset we were at within the expression, then
2394 we can't know if we've overstepped the bounds. */
754c3d5d 2395 if (! attrs.offset_known_p)
f12144dd 2396 attrs.expr = NULL_TREE;
e7dfe4bb 2397
f12144dd 2398 while (attrs.expr)
e7dfe4bb 2399 {
f12144dd 2400 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2401 {
f12144dd
RS
2402 tree field = TREE_OPERAND (attrs.expr, 1);
2403 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2404
2405 if (! DECL_SIZE_UNIT (field))
2406 {
f12144dd 2407 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2408 break;
2409 }
2410
2411 /* Is the field at least as large as the access? If so, ok,
2412 otherwise strip back to the containing structure. */
03667700
RK
2413 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2414 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2415 && attrs.offset >= 0)
e7dfe4bb
RH
2416 break;
2417
cc269bb6 2418 if (! tree_fits_uhwi_p (offset))
e7dfe4bb 2419 {
f12144dd 2420 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2421 break;
2422 }
2423
f12144dd 2424 attrs.expr = TREE_OPERAND (attrs.expr, 0);
ae7e9ddd
RS
2425 attrs.offset += tree_to_uhwi (offset);
2426 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
754c3d5d 2427 / BITS_PER_UNIT);
e7dfe4bb
RH
2428 }
2429 /* Similarly for the decl. */
f12144dd
RS
2430 else if (DECL_P (attrs.expr)
2431 && DECL_SIZE_UNIT (attrs.expr)
2432 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2433 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2434 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2435 break;
2436 else
2437 {
2438 /* The widened memory access overflows the expression, which means
2439 that it could alias another expression. Zap it. */
f12144dd 2440 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2441 break;
2442 }
2443 }
2444
f12144dd 2445 if (! attrs.expr)
754c3d5d 2446 attrs.offset_known_p = false;
e7dfe4bb
RH
2447
2448 /* The widened memory may alias other stuff, so zap the alias set. */
2449 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2450 attrs.alias = 0;
754c3d5d
RS
2451 attrs.size_known_p = true;
2452 attrs.size = size;
f12144dd 2453 set_mem_attrs (new_rtx, &attrs);
60564289 2454 return new_rtx;
e7dfe4bb 2455}
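
/* Illustrative sketch (hypothetical helper): widen a byte reference to
   a full word, as a target without byte loads might do before masking.
   As documented above, the result's alias set is cleared because the
   wider access can overlap neighboring objects.  */

static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}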
23b2ce53 2456\f
f6129d66
RH
2457/* A fake decl that is used as the MEM_EXPR of spill slots. */
2458static GTY(()) tree spill_slot_decl;
2459
3d7e23f6
RH
2460tree
2461get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2462{
2463 tree d = spill_slot_decl;
2464 rtx rd;
f12144dd 2465 struct mem_attrs attrs;
f6129d66 2466
3d7e23f6 2467 if (d || !force_build_p)
f6129d66
RH
2468 return d;
2469
c2255bc4
AH
2470 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2471 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2472 DECL_ARTIFICIAL (d) = 1;
2473 DECL_IGNORED_P (d) = 1;
2474 TREE_USED (d) = 1;
f6129d66
RH
2475 spill_slot_decl = d;
2476
2477 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2478 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2479 attrs = *mode_mem_attrs[(int) BLKmode];
2480 attrs.alias = new_alias_set ();
2481 attrs.expr = d;
2482 set_mem_attrs (rd, &attrs);
f6129d66
RH
2483 SET_DECL_RTL (d, rd);
2484
2485 return d;
2486}
2487
2488/* Given MEM, a result from assign_stack_local, fill in the memory
2489 attributes as appropriate for a register allocator spill slot.
2490 These slots are not aliasable by other memory. We arrange for
2491 them all to use a single MEM_EXPR, so that the aliasing code can
2492 work properly in the case of shared spill slots. */
2493
2494void
2495set_mem_attrs_for_spill (rtx mem)
2496{
f12144dd
RS
2497 struct mem_attrs attrs;
2498 rtx addr;
f6129d66 2499
f12144dd
RS
2500 attrs = *get_mem_attrs (mem);
2501 attrs.expr = get_spill_slot_decl (true);
2502 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2503 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2504
2505 /* We expect the incoming memory to be of the form:
2506 (mem:MODE (plus (reg sfp) (const_int offset)))
2507 with perhaps the plus missing for offset = 0. */
2508 addr = XEXP (mem, 0);
754c3d5d
RS
2509 attrs.offset_known_p = true;
2510 attrs.offset = 0;
f6129d66 2511 if (GET_CODE (addr) == PLUS
481683e1 2512 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2513 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2514
f12144dd 2515 set_mem_attrs (mem, &attrs);
f6129d66
RH
2516 MEM_NOTRAP_P (mem) = 1;
2517}
2518\f
23b2ce53
RS
2519/* Return a newly created CODE_LABEL rtx with a unique label number. */
2520
7dcc3ab5 2521rtx_code_label *
502b8322 2522gen_label_rtx (void)
23b2ce53 2523{
7dcc3ab5
DM
2524 return as_a <rtx_code_label *> (
2525 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2526 NULL, label_num++, NULL));
23b2ce53
RS
2527}
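
/* Illustrative sketch (hypothetical helper): the usual emit pattern
   for a fresh label -- branch around some code and bind the label
   afterwards.  Assumes REG holds an integer value.  */

static void
example_skip_when_zero (rtx reg)
{
  rtx_code_label *label = gen_label_rtx ();
  emit_cmp_and_jump_insns (reg, const0_rtx, EQ, NULL_RTX,
                           GET_MODE (reg), 0, label);
  /* ... insns emitted here execute only when REG is nonzero ...  */
  emit_label (label);
}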
2528\f
2529/* For procedure integration. */
2530
23b2ce53 2531/* Install new pointers to the first and last insns in the chain.
86fe05e0 2532 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2533 Used for an inline-procedure after copying the insn chain. */
2534
2535void
fee3e72c 2536set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
23b2ce53 2537{
fee3e72c 2538 rtx_insn *insn;
86fe05e0 2539
5936d944
JH
2540 set_first_insn (first);
2541 set_last_insn (last);
86fe05e0
RK
2542 cur_insn_uid = 0;
2543
b5b8b0ac
AO
2544 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2545 {
2546 int debug_count = 0;
2547
2548 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2549 cur_debug_insn_uid = 0;
2550
2551 for (insn = first; insn; insn = NEXT_INSN (insn))
2552 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2553 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2554 else
2555 {
2556 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2557 if (DEBUG_INSN_P (insn))
2558 debug_count++;
2559 }
2560
2561 if (debug_count)
2562 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2563 else
2564 cur_debug_insn_uid++;
2565 }
2566 else
2567 for (insn = first; insn; insn = NEXT_INSN (insn))
2568 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2569
2570 cur_insn_uid++;
23b2ce53 2571}
23b2ce53 2572\f
750c9258 2573/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2574 structure. This routine should only be called once. */
23b2ce53 2575
fd743bc1 2576static void
6bb9bf63 2577unshare_all_rtl_1 (rtx_insn *insn)
23b2ce53 2578{
d1b81779 2579 /* Unshare just about everything else. */
2c07f13b 2580 unshare_all_rtl_in_chain (insn);
750c9258 2581
23b2ce53
RS
2582 /* Make sure the addresses of stack slots found outside the insn chain
2583 (such as, in DECL_RTL of a variable) are not shared
2584 with the insn chain.
2585
2586 This special care is necessary when the stack slot MEM does not
2587 actually appear in the insn chain. If it does appear, its address
2588 is unshared from all else at that point. */
0f4783c7
DM
2589 stack_slot_list = safe_as_a <rtx_expr_list *> (
2590 copy_rtx_if_shared (stack_slot_list));
23b2ce53
RS
2591}
2592
750c9258 2593/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2594 structure, again. This is a fairly expensive thing to do so it
2595 should be done sparingly. */
2596
2597void
6bb9bf63 2598unshare_all_rtl_again (rtx_insn *insn)
d1b81779 2599{
6bb9bf63 2600 rtx_insn *p;
624c87aa
RE
2601 tree decl;
2602
d1b81779 2603 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2604 if (INSN_P (p))
d1b81779
GK
2605 {
2606 reset_used_flags (PATTERN (p));
2607 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2608 if (CALL_P (p))
2609 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2610 }
624c87aa 2611
2d4aecb3 2612 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2613 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2614
624c87aa 2615 /* Make sure that virtual parameters are not shared. */
910ad8de 2616 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2617 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2618
2619 reset_used_flags (stack_slot_list);
2620
b4aaa77b 2621 unshare_all_rtl_1 (insn);
fd743bc1
PB
2622}
2623
c2924966 2624unsigned int
fd743bc1
PB
2625unshare_all_rtl (void)
2626{
b4aaa77b 2627 unshare_all_rtl_1 (get_insns ());
c2924966 2628 return 0;
d1b81779
GK
2629}
2630
ef330312 2631
2c07f13b
JH
2632/* Check that ORIG is not marked when it should not be, and mark ORIG as
2633 in use. Recursively do the same for subexpressions. */
2634
2635static void
2636verify_rtx_sharing (rtx orig, rtx insn)
2637{
2638 rtx x = orig;
2639 int i;
2640 enum rtx_code code;
2641 const char *format_ptr;
2642
2643 if (x == 0)
2644 return;
2645
2646 code = GET_CODE (x);
2647
2648 /* These types may be freely shared. */
2649
2650 switch (code)
2651 {
2652 case REG:
0ca5af51
AO
2653 case DEBUG_EXPR:
2654 case VALUE:
d8116890 2655 CASE_CONST_ANY:
2c07f13b
JH
2656 case SYMBOL_REF:
2657 case LABEL_REF:
2658 case CODE_LABEL:
2659 case PC:
2660 case CC0:
3810076b 2661 case RETURN:
26898771 2662 case SIMPLE_RETURN:
2c07f13b 2663 case SCRATCH:
3e89ed8d 2664 /* SCRATCHes must be shared because they represent distinct values. */
c5c5ba89 2665 return;
3e89ed8d 2666 case CLOBBER:
c5c5ba89
JH
2667 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2668 clobbers or clobbers of hard registers that originated as pseudos.
2669 This is needed to allow safe register renaming. */
2670 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2671 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2672 return;
2673 break;
2c07f13b
JH
2674
2675 case CONST:
6fb5fa3c 2676 if (shared_const_p (orig))
2c07f13b
JH
2677 return;
2678 break;
2679
2680 case MEM:
2681 /* A MEM is allowed to be shared if its address is constant. */
2682 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2683 || reload_completed || reload_in_progress)
2684 return;
2685
2686 break;
2687
2688 default:
2689 break;
2690 }
2691
2692 /* This rtx may not be shared. If it has already been seen,
2693 replace it with a copy of itself. */
1a2caa7a 2694#ifdef ENABLE_CHECKING
2c07f13b
JH
2695 if (RTX_FLAG (x, used))
2696 {
ab532386 2697 error ("invalid rtl sharing found in the insn");
2c07f13b 2698 debug_rtx (insn);
ab532386 2699 error ("shared rtx");
2c07f13b 2700 debug_rtx (x);
ab532386 2701 internal_error ("internal consistency failure");
2c07f13b 2702 }
1a2caa7a
NS
2703#endif
2704 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2705
2c07f13b
JH
2706 RTX_FLAG (x, used) = 1;
2707
6614fd40 2708 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2709
2710 format_ptr = GET_RTX_FORMAT (code);
2711
2712 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2713 {
2714 switch (*format_ptr++)
2715 {
2716 case 'e':
2717 verify_rtx_sharing (XEXP (x, i), insn);
2718 break;
2719
2720 case 'E':
2721 if (XVEC (x, i) != NULL)
2722 {
2723 int j;
2724 int len = XVECLEN (x, i);
2725
2726 for (j = 0; j < len; j++)
2727 {
1a2caa7a
NS
2728 /* We allow sharing of ASM_OPERANDS inside a single
2729 instruction. */
2c07f13b 2730 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2731 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2732 == ASM_OPERANDS))
2c07f13b
JH
2733 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2734 else
2735 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2736 }
2737 }
2738 break;
2739 }
2740 }
2741 return;
2742}
2743
0e0f87d4
SB
2744/* Reset used-flags for INSN. */
2745
2746static void
2747reset_insn_used_flags (rtx insn)
2748{
2749 gcc_assert (INSN_P (insn));
2750 reset_used_flags (PATTERN (insn));
2751 reset_used_flags (REG_NOTES (insn));
2752 if (CALL_P (insn))
2753 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2754}
2755
a24243a0 2756/* Go through all the RTL insn bodies and clear all the USED bits. */
2c07f13b 2757
a24243a0
AK
2758static void
2759reset_all_used_flags (void)
2c07f13b 2760{
dc01c3d1 2761 rtx_insn *p;
2c07f13b
JH
2762
2763 for (p = get_insns (); p; p = NEXT_INSN (p))
2764 if (INSN_P (p))
2765 {
0e0f87d4
SB
2766 rtx pat = PATTERN (p);
2767 if (GET_CODE (pat) != SEQUENCE)
2768 reset_insn_used_flags (p);
2769 else
2954a813 2770 {
0e0f87d4
SB
2771 gcc_assert (REG_NOTES (p) == NULL);
2772 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2773 {
2774 rtx insn = XVECEXP (pat, 0, i);
2775 if (INSN_P (insn))
2776 reset_insn_used_flags (insn);
2777 }
2954a813 2778 }
2c07f13b 2779 }
a24243a0
AK
2780}
2781
0e0f87d4
SB
2782/* Verify sharing in INSN. */
2783
2784static void
2785verify_insn_sharing (rtx insn)
2786{
2787 gcc_assert (INSN_P (insn));
2788 reset_used_flags (PATTERN (insn));
2789 reset_used_flags (REG_NOTES (insn));
2790 if (CALL_P (insn))
2791 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2792}
2793
a24243a0
AK
2794/* Go through all the RTL insn bodies and check that there is no unexpected
2795 sharing in between the subexpressions. */
2796
2797DEBUG_FUNCTION void
2798verify_rtl_sharing (void)
2799{
dc01c3d1 2800 rtx_insn *p;
a24243a0
AK
2801
2802 timevar_push (TV_VERIFY_RTL_SHARING);
2803
2804 reset_all_used_flags ();
2c07f13b
JH
2805
2806 for (p = get_insns (); p; p = NEXT_INSN (p))
2807 if (INSN_P (p))
2808 {
0e0f87d4
SB
2809 rtx pat = PATTERN (p);
2810 if (GET_CODE (pat) != SEQUENCE)
2811 verify_insn_sharing (p);
2812 else
2813 for (int i = 0; i < XVECLEN (pat, 0); i++)
748e88da
JDA
2814 {
2815 rtx insn = XVECEXP (pat, 0, i);
2816 if (INSN_P (insn))
2817 verify_insn_sharing (insn);
2818 }
2c07f13b 2819 }
a222c01a 2820
a24243a0
AK
2821 reset_all_used_flags ();
2822
a222c01a 2823 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2824}
2825
d1b81779
GK
2826/* Go through all the RTL insn bodies and copy any invalid shared structure.
2827 Assumes the mark bits are cleared at entry. */
2828
2c07f13b 2829void
dc01c3d1 2830unshare_all_rtl_in_chain (rtx_insn *insn)
d1b81779
GK
2831{
2832 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2833 if (INSN_P (insn))
d1b81779
GK
2834 {
2835 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2836 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2837 if (CALL_P (insn))
2838 CALL_INSN_FUNCTION_USAGE (insn)
2839 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2840 }
2841}
2842
2d4aecb3 2843/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2844 shared. We never replace the DECL_RTLs themselves with a copy,
2845 but expressions mentioned into a DECL_RTL cannot be shared with
2846 expressions in the instruction stream.
2847
2848 Note that reload may convert pseudo registers into memories in-place.
2849 Pseudo registers are always shared, but MEMs never are. Thus if we
2850 reset the used flags on MEMs in the instruction stream, we must set
2851 them again on MEMs that appear in DECL_RTLs. */
2852
2d4aecb3 2853static void
5eb2a9f2 2854set_used_decls (tree blk)
2d4aecb3
AO
2855{
2856 tree t;
2857
2858 /* Mark decls. */
910ad8de 2859 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2860 if (DECL_RTL_SET_P (t))
5eb2a9f2 2861 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2862
2863 /* Now process sub-blocks. */
87caf699 2864 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2865 set_used_decls (t);
2d4aecb3
AO
2866}
2867
23b2ce53 2868/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2869 Recursively does the same for subexpressions. Uses
2870 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2871
2872rtx
502b8322 2873copy_rtx_if_shared (rtx orig)
23b2ce53 2874{
32b32b16
AP
2875 copy_rtx_if_shared_1 (&orig);
2876 return orig;
2877}
2878
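
/* Illustrative sketch: the discipline copy_rtx_if_shared expects.  The
   used flags must be cleared before the walk, and anything reachable
   twice is then copied; compare unshare_all_rtl_again, which is the
   real driver for whole-function unsharing.  */

static void
example_unshare_insn (rtx_insn *insn)
{
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
  REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
}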
ff954f39
AP
2879/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2880 use. Recursively does the same for subexpressions. */
2881
32b32b16
AP
2882static void
2883copy_rtx_if_shared_1 (rtx *orig1)
2884{
2885 rtx x;
b3694847
SS
2886 int i;
2887 enum rtx_code code;
32b32b16 2888 rtx *last_ptr;
b3694847 2889 const char *format_ptr;
23b2ce53 2890 int copied = 0;
32b32b16
AP
2891 int length;
2892
2893 /* Repeat is used to turn tail-recursion into iteration. */
2894repeat:
2895 x = *orig1;
23b2ce53
RS
2896
2897 if (x == 0)
32b32b16 2898 return;
23b2ce53
RS
2899
2900 code = GET_CODE (x);
2901
2902 /* These types may be freely shared. */
2903
2904 switch (code)
2905 {
2906 case REG:
0ca5af51
AO
2907 case DEBUG_EXPR:
2908 case VALUE:
d8116890 2909 CASE_CONST_ANY:
23b2ce53 2910 case SYMBOL_REF:
2c07f13b 2911 case LABEL_REF:
23b2ce53
RS
2912 case CODE_LABEL:
2913 case PC:
2914 case CC0:
276e0224 2915 case RETURN:
26898771 2916 case SIMPLE_RETURN:
23b2ce53 2917 case SCRATCH:
0f41302f 2918 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 2919 return;
3e89ed8d 2920 case CLOBBER:
c5c5ba89
JH
2921 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2922 clobbers or clobbers of hard registers that originated as pseudos.
2923 This is needed to allow safe register renaming. */
2924 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2925 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
3e89ed8d
JH
2926 return;
2927 break;
23b2ce53 2928
b851ea09 2929 case CONST:
6fb5fa3c 2930 if (shared_const_p (x))
32b32b16 2931 return;
b851ea09
RK
2932 break;
2933
b5b8b0ac 2934 case DEBUG_INSN:
23b2ce53
RS
2935 case INSN:
2936 case JUMP_INSN:
2937 case CALL_INSN:
2938 case NOTE:
23b2ce53
RS
2939 case BARRIER:
2940 /* The chain of insns is not being copied. */
32b32b16 2941 return;
23b2ce53 2942
e9a25f70
JL
2943 default:
2944 break;
23b2ce53
RS
2945 }
2946
2947 /* This rtx may not be shared. If it has already been seen,
2948 replace it with a copy of itself. */
2949
2adc7f12 2950 if (RTX_FLAG (x, used))
23b2ce53 2951 {
aacd3885 2952 x = shallow_copy_rtx (x);
23b2ce53
RS
2953 copied = 1;
2954 }
2adc7f12 2955 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2956
2957 /* Now scan the subexpressions recursively.
2958 We can store any replaced subexpressions directly into X
2959 since we know X is not shared! Any vectors in X
2960 must be copied if X was copied. */
2961
2962 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2963 length = GET_RTX_LENGTH (code);
2964 last_ptr = NULL;
b8698a0f 2965
32b32b16 2966 for (i = 0; i < length; i++)
23b2ce53
RS
2967 {
2968 switch (*format_ptr++)
2969 {
2970 case 'e':
32b32b16
AP
2971 if (last_ptr)
2972 copy_rtx_if_shared_1 (last_ptr);
2973 last_ptr = &XEXP (x, i);
23b2ce53
RS
2974 break;
2975
2976 case 'E':
2977 if (XVEC (x, i) != NULL)
2978 {
b3694847 2979 int j;
f0722107 2980 int len = XVECLEN (x, i);
b8698a0f 2981
6614fd40
KH
2982 /* Copy the vector iff we copied the rtx and the length
2983 is nonzero. */
f0722107 2984 if (copied && len > 0)
8f985ec4 2985 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2986
5d3cc252 2987 /* Recurse on everything inside the vector. */
f0722107 2988 for (j = 0; j < len; j++)
32b32b16
AP
2989 {
2990 if (last_ptr)
2991 copy_rtx_if_shared_1 (last_ptr);
2992 last_ptr = &XVECEXP (x, i, j);
2993 }
23b2ce53
RS
2994 }
2995 break;
2996 }
2997 }
32b32b16
AP
2998 *orig1 = x;
2999 if (last_ptr)
3000 {
3001 orig1 = last_ptr;
3002 goto repeat;
3003 }
3004 return;
23b2ce53
RS
3005}
3006
76369a82 3007/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 3008
76369a82
NF
3009static void
3010mark_used_flags (rtx x, int flag)
23b2ce53 3011{
b3694847
SS
3012 int i, j;
3013 enum rtx_code code;
3014 const char *format_ptr;
32b32b16 3015 int length;
23b2ce53 3016
32b32b16
AP
3017 /* Repeat is used to turn tail-recursion into iteration. */
3018repeat:
23b2ce53
RS
3019 if (x == 0)
3020 return;
3021
3022 code = GET_CODE (x);
3023
9faa82d8 3024 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
3025 for them. */
3026
3027 switch (code)
3028 {
3029 case REG:
0ca5af51
AO
3030 case DEBUG_EXPR:
3031 case VALUE:
d8116890 3032 CASE_CONST_ANY:
23b2ce53
RS
3033 case SYMBOL_REF:
3034 case CODE_LABEL:
3035 case PC:
3036 case CC0:
276e0224 3037 case RETURN:
26898771 3038 case SIMPLE_RETURN:
23b2ce53
RS
3039 return;
3040
b5b8b0ac 3041 case DEBUG_INSN:
23b2ce53
RS
3042 case INSN:
3043 case JUMP_INSN:
3044 case CALL_INSN:
3045 case NOTE:
3046 case LABEL_REF:
3047 case BARRIER:
3048 /* The chain of insns is not being copied. */
3049 return;
750c9258 3050
e9a25f70
JL
3051 default:
3052 break;
23b2ce53
RS
3053 }
3054
76369a82 3055 RTX_FLAG (x, used) = flag;
23b2ce53
RS
3056
3057 format_ptr = GET_RTX_FORMAT (code);
32b32b16 3058 length = GET_RTX_LENGTH (code);
b8698a0f 3059
32b32b16 3060 for (i = 0; i < length; i++)
23b2ce53
RS
3061 {
3062 switch (*format_ptr++)
3063 {
3064 case 'e':
32b32b16
AP
3065 if (i == length-1)
3066 {
3067 x = XEXP (x, i);
3068 goto repeat;
3069 }
76369a82 3070 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
3071 break;
3072
3073 case 'E':
3074 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 3075 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
3076 break;
3077 }
3078 }
3079}
2c07f13b 3080
76369a82 3081/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
3082 to look for shared sub-parts. */
3083
3084void
76369a82 3085reset_used_flags (rtx x)
2c07f13b 3086{
76369a82
NF
3087 mark_used_flags (x, 0);
3088}
2c07f13b 3089
76369a82
NF
3090/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3091 to look for shared sub-parts. */
2c07f13b 3092
76369a82
NF
3093void
3094set_used_flags (rtx x)
3095{
3096 mark_used_flags (x, 1);
2c07f13b 3097}
23b2ce53
RS
3098\f
3099/* Copy X if necessary so that it won't be altered by changes in OTHER.
3100 Return X or the rtx for the pseudo reg the value of X was copied into.
3101 OTHER must be valid as a SET_DEST. */
3102
3103rtx
502b8322 3104make_safe_from (rtx x, rtx other)
23b2ce53
RS
3105{
3106 while (1)
3107 switch (GET_CODE (other))
3108 {
3109 case SUBREG:
3110 other = SUBREG_REG (other);
3111 break;
3112 case STRICT_LOW_PART:
3113 case SIGN_EXTEND:
3114 case ZERO_EXTEND:
3115 other = XEXP (other, 0);
3116 break;
3117 default:
3118 goto done;
3119 }
3120 done:
3c0cb5de 3121 if ((MEM_P (other)
23b2ce53 3122 && ! CONSTANT_P (x)
f8cfc6aa 3123 && !REG_P (x)
23b2ce53 3124 && GET_CODE (x) != SUBREG)
f8cfc6aa 3125 || (REG_P (other)
23b2ce53
RS
3126 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3127 || reg_mentioned_p (other, x))))
3128 {
3129 rtx temp = gen_reg_rtx (GET_MODE (x));
3130 emit_move_insn (temp, x);
3131 return temp;
3132 }
3133 return x;
3134}
3135\f
3136/* Emission of insns (adding them to the doubly-linked list). */
3137
23b2ce53
RS
3138/* Return the last insn emitted, even if it is in a sequence now pushed. */
3139
db76cf1e 3140rtx_insn *
502b8322 3141get_last_insn_anywhere (void)
23b2ce53
RS
3142{
3143 struct sequence_stack *stack;
5936d944
JH
3144 if (get_last_insn ())
3145 return get_last_insn ();
49ad7cfa 3146 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
3147 if (stack->last != 0)
3148 return stack->last;
3149 return 0;
3150}
3151
2a496e8b
JDA
3152/* Return the first nonnote insn emitted in the current sequence or the
3153 current function. This routine looks inside SEQUENCEs. */
3154
e4685bc8 3155rtx_insn *
502b8322 3156get_first_nonnote_insn (void)
2a496e8b 3157{
dc01c3d1 3158 rtx_insn *insn = get_insns ();
91373fe8
JDA
3159
3160 if (insn)
3161 {
3162 if (NOTE_P (insn))
3163 for (insn = next_insn (insn);
3164 insn && NOTE_P (insn);
3165 insn = next_insn (insn))
3166 continue;
3167 else
3168 {
2ca202e7 3169 if (NONJUMP_INSN_P (insn)
91373fe8 3170 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3171 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
91373fe8
JDA
3172 }
3173 }
2a496e8b
JDA
3174
3175 return insn;
3176}
3177
3178/* Return the last nonnote insn emitted in the current sequence or the
3179 current function. This routine looks inside SEQUENCEs. */
3180
e4685bc8 3181rtx_insn *
502b8322 3182get_last_nonnote_insn (void)
2a496e8b 3183{
dc01c3d1 3184 rtx_insn *insn = get_last_insn ();
91373fe8
JDA
3185
3186 if (insn)
3187 {
3188 if (NOTE_P (insn))
3189 for (insn = previous_insn (insn);
3190 insn && NOTE_P (insn);
3191 insn = previous_insn (insn))
3192 continue;
3193 else
3194 {
dc01c3d1
DM
3195 if (NONJUMP_INSN_P (insn))
3196 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3197 insn = seq->insn (seq->len () - 1);
91373fe8
JDA
3198 }
3199 }
2a496e8b
JDA
3200
3201 return insn;
3202}
3203
b5b8b0ac
AO
3204/* Return the number of actual (non-debug) insns emitted in this
3205 function. */
3206
3207int
3208get_max_insn_count (void)
3209{
3210 int n = cur_insn_uid;
3211
3212 /* The table size must be stable across -g, to avoid codegen
3213 differences due to debug insns, and not be affected by
3214 -fmin-insn-uid, to avoid excessive table size and to simplify
3215 debugging of -fcompare-debug failures. */
3216 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3217 n -= cur_debug_insn_uid;
3218 else
3219 n -= MIN_NONDEBUG_INSN_UID;
3220
3221 return n;
3222}
3223
23b2ce53
RS
3224\f
3225/* Return the next insn. If it is a SEQUENCE, return the first insn
3226 of the sequence. */
3227
eb51c837 3228rtx_insn *
4ce524a1 3229next_insn (rtx_insn *insn)
23b2ce53 3230{
75547801
KG
3231 if (insn)
3232 {
3233 insn = NEXT_INSN (insn);
3234 if (insn && NONJUMP_INSN_P (insn)
3235 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3236 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3237 }
23b2ce53 3238
dc01c3d1 3239 return insn;
23b2ce53
RS
3240}
3241
3242/* Return the previous insn. If it is a SEQUENCE, return the last insn
3243 of the sequence. */
3244
eb51c837 3245rtx_insn *
4ce524a1 3246previous_insn (rtx_insn *insn)
23b2ce53 3247{
75547801
KG
3248 if (insn)
3249 {
3250 insn = PREV_INSN (insn);
dc01c3d1
DM
3251 if (insn && NONJUMP_INSN_P (insn))
3252 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3253 insn = seq->insn (seq->len () - 1);
75547801 3254 }
23b2ce53 3255
dc01c3d1 3256 return insn;
23b2ce53
RS
3257}
3258
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
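/* Editorial sketch (not part of the original file): these walkers find
   the "real" neighbor of an insn while ignoring bookkeeping.  A
   hypothetical check whether INSN is the last meaningful insn of its
   chain, ignoring notes and debug insns:

     static bool
     insn_is_effectively_last (rtx_insn *insn)
     {
       return next_nonnote_nondebug_insn (insn) == NULL;
     }

   The _bb variants additionally refuse to cross a
   NOTE_INSN_BASIC_BLOCK marker into the next basic block.  */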
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
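/* Editorial sketch (not part of the original file): active_insn_p is
   the predicate behind next_active_insn and prev_active_insn.  A
   hypothetical count of "active" insns in the current chain:

     int count = 0;
     for (rtx_insn *i = get_insns (); i != NULL; i = NEXT_INSN (i))
       if (INSN_P (i) && active_insn_p (i))
         count++;

   Before reload every INSN_P insn is active; afterwards standalone
   USE and CLOBBER patterns no longer count.  */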
\f
#ifdef HAVE_cc0
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif
#ifdef AUTO_INC_DEC
/* Return true if X contains an RTX_AUTOINC class rtx whose address
   operand matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}
#endif
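/* Editorial sketch (not part of the original file): the
   FOR_EACH_SUBRTX iterator used above is the general idiom for
   walking an rtx tree.  A hypothetical example counting MEM
   subexpressions of a pattern:

     static int
     count_mems (const_rtx pat)
     {
       int n = 0;
       subrtx_iterator::array_type array;
       FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
         if (MEM_P (*iter))
           n++;
       return n;
     }
   */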
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx uncast_trial, int last)
{
  rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx call_insn = NULL_RTX;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
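/* Editorial sketch (not part of the original file): a typical caller
   pattern, as seen in passes that split insns late.  INSN here is a
   hypothetical insn in the current function:

     rtx_insn *first = try_split (PATTERN (insn), insn, 0);
     if (first != insn)
       {
         ... INSN was replaced by one or more new insns starting at
         FIRST; the old insn has been deleted ...
       }

   Passing LAST == 1 instead returns the last insn of the replacement
   sequence.  */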
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
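/* Editorial sketch (not part of the original file): the make_*_raw
   routines above are the single point where insn objects are born;
   the emit_* entry points below thread one of them through as a
   callback.  A hypothetical direct use, creating and appending a
   bare insn for pattern PAT:

     rtx_insn *insn = make_insn_raw (pat);
     add_insn (insn);

   which is exactly what emit_insn's default case does.  */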
\f
/* Link INSN into the doubly-linked list between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and
   NOTE_P objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and
   NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
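/* Editorial note (not part of the original file): the invariant
   maintained here is that for a filled delay-slot SEQUENCE, the first
   and last member insns mirror the PREV_INSN/NEXT_INSN links of the
   SEQUENCE holder itself, so chain walks can pass seamlessly through
   either level.  A hypothetical check of that invariant, where
   "holder" is the insn whose pattern is the SEQUENCE:

     rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (holder));
     gcc_checking_assert (PREV_INSN (seq->insn (0))
                          == PREV_INSN (holder));
   */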
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      if (get_last_insn () == after)
        set_last_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (after == stack->last)
              {
                stack->last = insn;
                break;
              }
        }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      if (get_insns () == before)
        set_first_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (before == stack->first)
              {
                stack->first = insn;
                break;
              }

          gcc_assert (stack);
        }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, so that it can
   be re-emitted elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        SET_PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
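/* Editorial sketch (not part of the original file): the documented
   move-an-insn idiom built on remove_insn.  INSN and AFTER are
   hypothetical insns in the current function:

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);

   Nullifying the links between the two calls is required, as the
   comment above remove_insn explains.  */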
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_insn *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_insn *> (label);
}
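/* Editorial sketch (not part of the original file): tying the pieces
   together, inserting a hypothetical register-copy before SPOT:

     rtx_insn *new_insn
       = emit_insn_before_noloc (gen_rtx_SET (VOIDmode, dst, src),
                                 SPOT, NULL);

   where dst and src are hypothetical REG rtxes.  The plain
   emit_insn_before wrapper further down also copies an
   INSN_LOCATION onto the new insn.  */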
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
        return true;

      /* Notes for var tracking and EH region markers can appear between or
         inside basic blocks.  If the caller is emitting on the basic block
         boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
        return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
        return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx uncast_after)
{
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx uncast_before)
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
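/* Editorial sketch (not part of the original file): bracketing a
   hypothetical insn INSN with EH region markers:

     rtx_note *beg = emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
     rtx_note *end = emit_note_after (NOTE_INSN_EH_REGION_END, insn);

   (A real caller would also store the region number in the notes.)
   Because these kinds may sit on a block boundary, the helpers above
   decide per call whether the new note gets BLOCK_FOR_INSN set.  */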
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return safe_as_a <rtx_insn *> (last);

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return safe_as_a <rtx_insn *> (last);
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
                    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
4711
5368224f 4712/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4713rtx_insn *
e8110d6f 4714emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4715{
e8110d6f
NF
4716 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4717}
2f937369 4718
5368224f 4719/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4720rtx_insn *
e8110d6f
NF
4721emit_insn_after (rtx pattern, rtx after)
4722{
4723 return emit_pattern_after (pattern, after, true, make_insn_raw);
4724}
dd3adcf8 4725
5368224f 4726/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4727rtx_insn *
e8110d6f
NF
4728emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4729{
4730 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4731}
4732
5368224f 4733/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4734rtx_insn *
a7102479
JH
4735emit_jump_insn_after (rtx pattern, rtx after)
4736{
e8110d6f 4737 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4738}
4739
5368224f 4740/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4741rtx_insn *
502b8322 4742emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4743{
e8110d6f 4744 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4745}
4746
5368224f 4747/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4748rtx_insn *
a7102479
JH
4749emit_call_insn_after (rtx pattern, rtx after)
4750{
e8110d6f 4751 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4752}
4753
5368224f 4754/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
cd459bf8 4755rtx_insn *
b5b8b0ac
AO
4756emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4757{
e8110d6f 4758 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4759}
4760
5368224f 4761/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
cd459bf8 4762rtx_insn *
b5b8b0ac
AO
4763emit_debug_insn_after (rtx pattern, rtx after)
4764{
e8110d6f 4765 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4766}
4767
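/* Editorial note (not part of the original file): the _setloc/"plain"
   pairing is uniform across the emit_*_after family.  A hypothetical
   use that pins a new insn to the source location of an existing
   insn OLD rather than inheriting it from the insertion point:

     emit_insn_after_setloc (pat, after, INSN_LOCATION (old));

   The plain emit_insn_after (pat, after) instead copies the location
   of the nearest non-debug insn at or before AFTER.  */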
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
                            rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
                                              insnp ? before : NULL_RTX,
                                              NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
                     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
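/* Editorial sketch (not part of the original file): emit_insn accepts
   either a bare pattern or a pre-built insn list, which is what makes
   the start_sequence idiom work.  A hypothetical append of a copy
   insn to the current function, where dst and src are hypothetical
   REG rtxes:

     emit_insn (gen_rtx_SET (VOIDmode, dst, src));

   Handing it the head of a sequence obtained from get_insns ()
   instead splices the whole list onto the end of the chain.  */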
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
5067/* Add the label LABEL to the end of the doubly-linked list. */
5068
cd459bf8 5069rtx_insn *
502b8322 5070emit_label (rtx label)
23b2ce53 5071{
468660d3
SB
5072 gcc_checking_assert (INSN_UID (label) == 0);
5073 INSN_UID (label) = cur_insn_uid++;
9152e0aa 5074 add_insn (as_a <rtx_insn *> (label));
cd459bf8 5075 return as_a <rtx_insn *> (label);
23b2ce53
RS
5076}
5077
39718607
SB
5078/* Make an insn of code JUMP_TABLE_DATA
5079 and add it to the end of the doubly-linked list. */
5080
4598afdd 5081rtx_jump_table_data *
39718607
SB
5082emit_jump_table_data (rtx table)
5083{
4598afdd
DM
5084 rtx_jump_table_data *jump_table_data =
5085 as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
39718607
SB
5086 INSN_UID (jump_table_data) = cur_insn_uid++;
5087 PATTERN (jump_table_data) = table;
5088 BLOCK_FOR_INSN (jump_table_data) = NULL;
5089 add_insn (jump_table_data);
5090 return jump_table_data;
5091}
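
/* An illustrative sketch, not part of the original file: emit a
   dispatch table the way tablejump expansion might, placing the
   JUMP_TABLE_DATA insn right after its CODE_LABEL.  TABLE_LABEL and
   LABEL_REFS are hypothetical arguments; many targets would use
   CASE_VECTOR_MODE instead of Pmode here.  */
static rtx_jump_table_data *
sketch_emit_dispatch_table (rtx table_label, rtvec label_refs)
{
  emit_label (table_label);
  return emit_jump_table_data (gen_rtx_ADDR_VEC (Pmode, label_refs));
}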
5092
23b2ce53
RS
5093/* Make an insn of code BARRIER
5094 and add it to the end of the doubly-linked list. */
5095
cd459bf8 5096rtx_barrier *
502b8322 5097emit_barrier (void)
23b2ce53 5098{
cd459bf8 5099 rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
23b2ce53
RS
5100 INSN_UID (barrier) = cur_insn_uid++;
5101 add_insn (barrier);
5102 return barrier;
5103}
5104
5f2fc772 5105/* Emit a copy of note ORIG. */
502b8322 5106
66e8df53
DM
5107rtx_note *
5108emit_note_copy (rtx_note *orig)
5f2fc772 5109{
96fba521 5110 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
66e8df53 5111 rtx_note *note = make_note_raw (kind);
5f2fc772 5112 NOTE_DATA (note) = NOTE_DATA (orig);
5f2fc772 5113 add_insn (note);
2e040219 5114 return note;
23b2ce53
RS
5115}
5116
2e040219
NS
5117/* Make an insn of code NOTE with kind KIND
5118 and add it to the end of the doubly-linked list. */
23b2ce53 5119
66e8df53 5120rtx_note *
a38e7aa5 5121emit_note (enum insn_note kind)
23b2ce53 5122{
66e8df53 5123 rtx_note *note = make_note_raw (kind);
23b2ce53
RS
5124 add_insn (note);
5125 return note;
5126}
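
/* An illustrative sketch (hypothetical helper): notes are emitted by
   kind; for example, the expander marks the start of the function
   body with a NOTE_INSN_FUNCTION_BEG note.  */
static rtx_note *
sketch_mark_function_beg (void)
{
  return emit_note (NOTE_INSN_FUNCTION_BEG);
}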
5127
c41c1387
RS
5128/* Emit a clobber of lvalue X. */
5129
cd459bf8 5130rtx_insn *
c41c1387
RS
5131emit_clobber (rtx x)
5132{
5133 /* CONCATs should not appear in the insn stream. */
5134 if (GET_CODE (x) == CONCAT)
5135 {
5136 emit_clobber (XEXP (x, 0));
5137 return emit_clobber (XEXP (x, 1));
5138 }
5139 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5140}
5141
5142/* Return a sequence of insns to clobber lvalue X. */
5143
cd459bf8 5144rtx_insn *
c41c1387
RS
5145gen_clobber (rtx x)
5146{
cd459bf8 5147 rtx_insn *seq;
c41c1387
RS
5148
5149 start_sequence ();
5150 emit_clobber (x);
5151 seq = get_insns ();
5152 end_sequence ();
5153 return seq;
5154}
5155
5156/* Emit a use of rvalue X. */
5157
cd459bf8 5158rtx_insn *
c41c1387
RS
5159emit_use (rtx x)
5160{
5161 /* CONCATs should not appear in the insn stream. */
5162 if (GET_CODE (x) == CONCAT)
5163 {
5164 emit_use (XEXP (x, 0));
5165 return emit_use (XEXP (x, 1));
5166 }
5167 return emit_insn (gen_rtx_USE (VOIDmode, x));
5168}
5169
5170/* Return a sequence of insns to use rvalue X. */
5171
cd459bf8 5172rtx_insn *
c41c1387
RS
5173gen_use (rtx x)
5174{
cd459bf8 5175 rtx_insn *seq;
c41c1387
RS
5176
5177 start_sequence ();
5178 emit_use (x);
5179 seq = get_insns ();
5180 end_sequence ();
5181 return seq;
5182}
5183
c8912e53
RS
5184/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5185 Return the set in INSN that such notes describe, or NULL if the notes
5186 have no meaning for INSN. */
5187
5188rtx
5189set_for_reg_notes (rtx insn)
5190{
5191 rtx pat, reg;
5192
5193 if (!INSN_P (insn))
5194 return NULL_RTX;
5195
5196 pat = PATTERN (insn);
5197 if (GET_CODE (pat) == PARALLEL)
5198 {
5199 /* We do not use single_set because that ignores SETs of unused
5200 registers. REG_EQUAL and REG_EQUIV notes really do require the
5201 PARALLEL to have a single SET. */
5202 if (multiple_sets (insn))
5203 return NULL_RTX;
5204 pat = XVECEXP (pat, 0, 0);
5205 }
5206
5207 if (GET_CODE (pat) != SET)
5208 return NULL_RTX;
5209
5210 reg = SET_DEST (pat);
5211
5212 /* Notes apply to the contents of a STRICT_LOW_PART. */
5213 if (GET_CODE (reg) == STRICT_LOW_PART)
5214 reg = XEXP (reg, 0);
5215
5216 /* Check that we have a register. */
5217 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5218 return NULL_RTX;
5219
5220 return pat;
5221}
5222
87b47c85 5223/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 5224 note of this type already exists, its datum is replaced. */
87b47c85 5225
3d238248 5226rtx
502b8322 5227set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
5228{
5229 rtx note = find_reg_note (insn, kind, NULL_RTX);
5230
52488da1
JW
5231 switch (kind)
5232 {
5233 case REG_EQUAL:
5234 case REG_EQUIV:
c8912e53
RS
5235 if (!set_for_reg_notes (insn))
5236 return NULL_RTX;
52488da1
JW
5237
5238 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5239 It serves no useful purpose and breaks eliminate_regs. */
5240 if (GET_CODE (datum) == ASM_OPERANDS)
5241 return NULL_RTX;
109374e2
RS
5242
5243 /* Notes with side effects are dangerous. Even if the side-effect
5244 initially mirrors one in PATTERN (INSN), later optimizations
5245 might alter the way that the final register value is calculated
5246 and so move or alter the side-effect in some way. The note would
5247 then no longer be a valid substitution for SET_SRC. */
5248 if (side_effects_p (datum))
5249 return NULL_RTX;
52488da1
JW
5250 break;
5251
5252 default:
5253 break;
5254 }
3d238248 5255
c8912e53
RS
5256 if (note)
5257 XEXP (note, 0) = datum;
5258 else
5259 {
5260 add_reg_note (insn, kind, datum);
5261 note = REG_NOTES (insn);
5262 }
6fb5fa3c
DB
5263
5264 switch (kind)
3d238248 5265 {
6fb5fa3c
DB
5266 case REG_EQUAL:
5267 case REG_EQUIV:
b2908ba6 5268 df_notes_rescan (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
5269 break;
5270 default:
5271 break;
3d238248 5272 }
87b47c85 5273
c8912e53 5274 return note;
87b47c85 5275}
7543f918
JR
5276
5277/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5278rtx
5279set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5280{
c8912e53 5281 rtx set = set_for_reg_notes (insn);
7543f918
JR
5282
5283 if (set && SET_DEST (set) == dst)
5284 return set_unique_reg_note (insn, kind, datum);
5285 return NULL_RTX;
5286}
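
/* An illustrative sketch (hypothetical helper): record that the
   register set by INSN is known to equal VALUE.  The note is only
   attached when INSN's single set really targets REG;
   set_unique_reg_note additionally rejects ASM_OPERANDS values and
   values with side effects.  */
static rtx
sketch_record_value (rtx insn, rtx reg, rtx value)
{
  return set_dst_reg_note (insn, REG_EQUAL, value, reg);
}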
23b2ce53
RS
5287\f
5288/* Return an indication of which type of insn should have X as a body.
5289 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5290
d78db459 5291static enum rtx_code
502b8322 5292classify_insn (rtx x)
23b2ce53 5293{
4b4bf941 5294 if (LABEL_P (x))
23b2ce53
RS
5295 return CODE_LABEL;
5296 if (GET_CODE (x) == CALL)
5297 return CALL_INSN;
26898771 5298 if (ANY_RETURN_P (x))
23b2ce53
RS
5299 return JUMP_INSN;
5300 if (GET_CODE (x) == SET)
5301 {
5302 if (SET_DEST (x) == pc_rtx)
5303 return JUMP_INSN;
5304 else if (GET_CODE (SET_SRC (x)) == CALL)
5305 return CALL_INSN;
5306 else
5307 return INSN;
5308 }
5309 if (GET_CODE (x) == PARALLEL)
5310 {
b3694847 5311 int j;
23b2ce53
RS
5312 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5313 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5314 return CALL_INSN;
5315 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5316 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5317 return JUMP_INSN;
5318 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5319 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5320 return CALL_INSN;
5321 }
5322 return INSN;
5323}
5324
5325/* Emit the rtl pattern X as an appropriate kind of insn.
5326 If X is a label, it is simply added into the insn chain. */
5327
cd459bf8 5328rtx_insn *
502b8322 5329emit (rtx x)
23b2ce53
RS
5330{
5331 enum rtx_code code = classify_insn (x);
5332
5b0264cb 5333 switch (code)
23b2ce53 5334 {
5b0264cb
NS
5335 case CODE_LABEL:
5336 return emit_label (x);
5337 case INSN:
5338 return emit_insn (x);
5339 case JUMP_INSN:
5340 {
cd459bf8 5341 rtx_insn *insn = emit_jump_insn (x);
5b0264cb
NS
5342 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5343 return emit_barrier ();
5344 return insn;
5345 }
5346 case CALL_INSN:
5347 return emit_call_insn (x);
b5b8b0ac
AO
5348 case DEBUG_INSN:
5349 return emit_debug_insn (x);
5b0264cb
NS
5350 default:
5351 gcc_unreachable ();
23b2ce53 5352 }
23b2ce53
RS
5353}
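
/* An illustrative sketch (hypothetical helper): hand a raw pattern
   to `emit' and let classify_insn choose the insn code.  A
   (set (pc) (label_ref LABEL)) pattern classifies as a JUMP_INSN,
   and because it is an unconditional jump, `emit' appends a barrier
   and returns that barrier.  */
static rtx_insn *
sketch_emit_jump_to (rtx label)
{
  return emit (gen_rtx_SET (VOIDmode, pc_rtx,
			    gen_rtx_LABEL_REF (VOIDmode, label)));
}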
5354\f
e2500fed 5355/* Space for free sequence stack entries. */
1431042e 5356static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5357
4dfa0342
RH
5358/* Begin emitting insns to a sequence. If this sequence will contain
5359 something that might cause the compiler to pop arguments to function
5360 calls (because those pops have previously been deferred; see
5361 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5362 before calling this function. That will ensure that the deferred
5363 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5364
5365void
502b8322 5366start_sequence (void)
23b2ce53
RS
5367{
5368 struct sequence_stack *tem;
5369
e2500fed
GK
5370 if (free_sequence_stack != NULL)
5371 {
5372 tem = free_sequence_stack;
5373 free_sequence_stack = tem->next;
5374 }
5375 else
766090c2 5376 tem = ggc_alloc<sequence_stack> ();
23b2ce53 5377
49ad7cfa 5378 tem->next = seq_stack;
5936d944
JH
5379 tem->first = get_insns ();
5380 tem->last = get_last_insn ();
23b2ce53 5381
49ad7cfa 5382 seq_stack = tem;
23b2ce53 5383
5936d944
JH
5384 set_first_insn (0);
5385 set_last_insn (0);
23b2ce53
RS
5386}
5387
5c7a310f
MM
5388/* Set up the insn chain starting with FIRST as the current sequence,
5389 saving the previously current one. See the documentation for
5390 start_sequence for more information about how to use this function. */
23b2ce53
RS
5391
5392void
fee3e72c 5393push_to_sequence (rtx_insn *first)
23b2ce53 5394{
fee3e72c 5395 rtx_insn *last;
23b2ce53
RS
5396
5397 start_sequence ();
5398
e84a58ff
EB
5399 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5400 ;
23b2ce53 5401
5936d944
JH
5402 set_first_insn (first);
5403 set_last_insn (last);
23b2ce53
RS
5404}
5405
bb27eeda
SE
5406/* Like push_to_sequence, but take the last insn as an argument to avoid
5407 looping through the list. */
5408
5409void
fee3e72c 5410push_to_sequence2 (rtx_insn *first, rtx_insn *last)
bb27eeda
SE
5411{
5412 start_sequence ();
5413
5936d944
JH
5414 set_first_insn (first);
5415 set_last_insn (last);
bb27eeda
SE
5416}
5417
f15ae3a1
TW
5418/* Set up the outer-level insn chain
5419 as the current sequence, saving the previously current one. */
5420
5421void
502b8322 5422push_topmost_sequence (void)
f15ae3a1 5423{
aefdd5ab 5424 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5425
5426 start_sequence ();
5427
49ad7cfa 5428 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5429 top = stack;
5430
5936d944
JH
5431 set_first_insn (top->first);
5432 set_last_insn (top->last);
f15ae3a1
TW
5433}
5434
5435/* After emitting to the outer-level insn chain, update the outer-level
5436 insn chain, and restore the previous saved state. */
5437
5438void
502b8322 5439pop_topmost_sequence (void)
f15ae3a1 5440{
aefdd5ab 5441 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5442
49ad7cfa 5443 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5444 top = stack;
5445
5936d944
JH
5446 top->first = get_insns ();
5447 top->last = get_last_insn ();
f15ae3a1
TW
5448
5449 end_sequence ();
5450}
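
/* An illustrative sketch (hypothetical helper): emit PATTERN at the
   end of the outer-level insn chain regardless of how many sequences
   are currently pushed, then restore the previous state.  */
static void
sketch_emit_at_toplevel (rtx pattern)
{
  push_topmost_sequence ();
  emit_insn (pattern);
  pop_topmost_sequence ();
}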
5451
23b2ce53
RS
5452/* After emitting to a sequence, restore previous saved state.
5453
5c7a310f 5454 To get the contents of the sequence just made, you must call
2f937369 5455 `get_insns' *before* calling here.
5c7a310f
MM
5456
5457 If the compiler might have deferred popping arguments while
5458 generating this sequence, and this sequence will not be immediately
5459 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5460 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5461 pops are inserted into this sequence, and not into some random
5462 location in the instruction stream. See INHIBIT_DEFER_POP for more
5463 information about deferred popping of arguments. */
23b2ce53
RS
5464
5465void
502b8322 5466end_sequence (void)
23b2ce53 5467{
49ad7cfa 5468 struct sequence_stack *tem = seq_stack;
23b2ce53 5469
5936d944
JH
5470 set_first_insn (tem->first);
5471 set_last_insn (tem->last);
49ad7cfa 5472 seq_stack = tem->next;
23b2ce53 5473
e2500fed
GK
5474 memset (tem, 0, sizeof (*tem));
5475 tem->next = free_sequence_stack;
5476 free_sequence_stack = tem;
23b2ce53
RS
5477}
5478
5479/* Return 1 if currently emitting into a sequence. */
5480
5481int
502b8322 5482in_sequence_p (void)
23b2ce53 5483{
49ad7cfa 5484 return seq_stack != 0;
23b2ce53 5485}
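
/* An illustrative sketch (hypothetical helper) of nested sequences:
   each start_sequence pushes the current chain onto seq_stack and
   each end_sequence pops it back.  Note that emit_insn splices an
   entire insn chain when handed its first insn, which is how the
   inner sequence is merged into the outer one.  */
static rtx_insn *
sketch_nested_sequences (rtx x, rtx y)
{
  rtx_insn *inner, *outer;

  start_sequence ();		/* Begin the outer sequence.  */
  emit_use (x);
  start_sequence ();		/* Begin a nested, inner sequence.  */
  emit_use (y);
  inner = get_insns ();		/* Must precede end_sequence.  */
  end_sequence ();		/* Back in the outer sequence.  */
  emit_insn (inner);		/* Splice the inner chain.  */
  outer = get_insns ();
  end_sequence ();
  return outer;
}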
23b2ce53 5486\f
59ec66dc
MM
5487/* Put the various virtual registers into REGNO_REG_RTX. */
5488
2bbdec73 5489static void
bd60bab2 5490init_virtual_regs (void)
59ec66dc 5491{
bd60bab2
JH
5492 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5493 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5494 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5495 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5496 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5497 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5498 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5499}
5500
da43a810
BS
5501\f
5502/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5503static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5504static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5505static int copy_insn_n_scratches;
5506
5507/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5508 copied an ASM_OPERANDS.
5509 In that case, it is the original input-operand vector. */
5510static rtvec orig_asm_operands_vector;
5511
5512/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5513 copied an ASM_OPERANDS.
5514 In that case, it is the copied input-operand vector. */
5515static rtvec copy_asm_operands_vector;
5516
5517/* Likewise for the constraints vector. */
5518static rtvec orig_asm_constraints_vector;
5519static rtvec copy_asm_constraints_vector;
5520
5521/* Recursively create a new copy of an rtx for copy_insn.
5522 This function differs from copy_rtx in that it handles SCRATCHes and
5523 ASM_OPERANDs properly.
5524 Normally, this function is not used directly; use copy_insn as front end.
5525 However, you could first copy an insn pattern with copy_insn and then use
5526 this function afterwards to properly copy any REG_NOTEs containing
5527 SCRATCHes. */
5528
5529rtx
502b8322 5530copy_insn_1 (rtx orig)
da43a810 5531{
b3694847
SS
5532 rtx copy;
5533 int i, j;
5534 RTX_CODE code;
5535 const char *format_ptr;
da43a810 5536
cd9c1ca8
RH
5537 if (orig == NULL)
5538 return NULL;
5539
da43a810
BS
5540 code = GET_CODE (orig);
5541
5542 switch (code)
5543 {
5544 case REG:
a52a87c3 5545 case DEBUG_EXPR:
d8116890 5546 CASE_CONST_ANY:
da43a810
BS
5547 case SYMBOL_REF:
5548 case CODE_LABEL:
5549 case PC:
5550 case CC0:
276e0224 5551 case RETURN:
26898771 5552 case SIMPLE_RETURN:
da43a810 5553 return orig;
3e89ed8d 5554 case CLOBBER:
c5c5ba89
JH
5555 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5556 clobbers or clobbers of hard registers that originated as pseudos.
5557 This is needed to allow safe register renaming. */
5558 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5559 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
3e89ed8d
JH
5560 return orig;
5561 break;
da43a810
BS
5562
5563 case SCRATCH:
5564 for (i = 0; i < copy_insn_n_scratches; i++)
5565 if (copy_insn_scratch_in[i] == orig)
5566 return copy_insn_scratch_out[i];
5567 break;
5568
5569 case CONST:
6fb5fa3c 5570 if (shared_const_p (orig))
da43a810
BS
5571 return orig;
5572 break;
750c9258 5573
da43a810
BS
5574 /* A MEM with a constant address is not sharable. The problem is that
5575 the constant address may need to be reloaded. If the mem is shared,
5576 then reloading one copy of this mem will cause all copies to appear
5577 to have been reloaded. */
5578
5579 default:
5580 break;
5581 }
5582
aacd3885
RS
5583 /* Copy the various flags, fields, and other information. We assume
5584 that all fields need copying, and then clear the fields that should
da43a810
BS
5585 not be copied. That is the sensible default behavior, and forces
5586 us to explicitly document why we are *not* copying a flag. */
aacd3885 5587 copy = shallow_copy_rtx (orig);
da43a810
BS
5588
5589 /* We do not copy the USED flag, which is used as a mark bit during
5590 walks over the RTL. */
2adc7f12 5591 RTX_FLAG (copy, used) = 0;
da43a810
BS
5592
5593 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 5594 if (INSN_P (orig))
da43a810 5595 {
2adc7f12
JJ
5596 RTX_FLAG (copy, jump) = 0;
5597 RTX_FLAG (copy, call) = 0;
5598 RTX_FLAG (copy, frame_related) = 0;
da43a810 5599 }
750c9258 5600
da43a810
BS
5601 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5602
5603 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
5604 switch (*format_ptr++)
5605 {
5606 case 'e':
5607 if (XEXP (orig, i) != NULL)
5608 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5609 break;
da43a810 5610
aacd3885
RS
5611 case 'E':
5612 case 'V':
5613 if (XVEC (orig, i) == orig_asm_constraints_vector)
5614 XVEC (copy, i) = copy_asm_constraints_vector;
5615 else if (XVEC (orig, i) == orig_asm_operands_vector)
5616 XVEC (copy, i) = copy_asm_operands_vector;
5617 else if (XVEC (orig, i) != NULL)
5618 {
5619 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5620 for (j = 0; j < XVECLEN (copy, i); j++)
5621 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5622 }
5623 break;
da43a810 5624
aacd3885
RS
5625 case 't':
5626 case 'w':
5627 case 'i':
5628 case 's':
5629 case 'S':
5630 case 'u':
5631 case '0':
5632 /* These are left unchanged. */
5633 break;
da43a810 5634
aacd3885
RS
5635 default:
5636 gcc_unreachable ();
5637 }
da43a810
BS
5638
5639 if (code == SCRATCH)
5640 {
5641 i = copy_insn_n_scratches++;
5b0264cb 5642 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
5643 copy_insn_scratch_in[i] = orig;
5644 copy_insn_scratch_out[i] = copy;
5645 }
5646 else if (code == ASM_OPERANDS)
5647 {
6462bb43
AO
5648 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5649 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5650 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5651 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
5652 }
5653
5654 return copy;
5655}
5656
5657/* Create a new copy of an rtx.
5658 This function differs from copy_rtx in that it handles SCRATCHes and
5659 ASM_OPERANDs properly.
5660 INSN doesn't really have to be a full INSN; it could be just the
5661 pattern. */
5662rtx
502b8322 5663copy_insn (rtx insn)
da43a810
BS
5664{
5665 copy_insn_n_scratches = 0;
5666 orig_asm_operands_vector = 0;
5667 orig_asm_constraints_vector = 0;
5668 copy_asm_operands_vector = 0;
5669 copy_asm_constraints_vector = 0;
5670 return copy_insn_1 (insn);
5671}
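
/* An illustrative sketch (hypothetical helper): re-emit a copy of
   INSN's pattern at the current position.  Unlike copy_rtx,
   copy_insn maps each SCRATCH to one consistent fresh copy and keeps
   the operand and constraint vectors of an ASM_OPERANDS shared
   between all copies of the same asm statement.  */
static rtx_insn *
sketch_reemit_pattern (rtx_insn *insn)
{
  return emit_insn (copy_insn (PATTERN (insn)));
}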
59ec66dc 5672
8e383849
JR
5673/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5674 on the assumption that INSN itself remains in its original place. */
5675
f8f0516e
DM
5676rtx_insn *
5677copy_delay_slot_insn (rtx_insn *insn)
8e383849
JR
5678{
5679 /* Copy INSN with its rtx_code, all its notes, location etc. */
f8f0516e 5680 insn = as_a <rtx_insn *> (copy_rtx (insn));
8e383849
JR
5681 INSN_UID (insn) = cur_insn_uid++;
5682 return insn;
5683}
5684
23b2ce53
RS
5685/* Initialize data structures and variables in this file
5686 before generating rtl for each function. */
5687
5688void
502b8322 5689init_emit (void)
23b2ce53 5690{
5936d944
JH
5691 set_first_insn (NULL);
5692 set_last_insn (NULL);
b5b8b0ac
AO
5693 if (MIN_NONDEBUG_INSN_UID)
5694 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5695 else
5696 cur_insn_uid = 1;
5697 cur_debug_insn_uid = 1;
23b2ce53 5698 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
23b2ce53 5699 first_label_num = label_num;
49ad7cfa 5700 seq_stack = NULL;
23b2ce53 5701
23b2ce53
RS
5702 /* Init the tables that describe all the pseudo regs. */
5703
3e029763 5704 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 5705
3e029763 5706 crtl->emit.regno_pointer_align
1b4572a8 5707 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
86fe05e0 5708
766090c2 5709 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
0d4903b8 5710
e50126e8 5711 /* Put copies of all the hard registers into regno_reg_rtx. */
6cde4876 5712 memcpy (regno_reg_rtx,
5fb0e246 5713 initial_regno_reg_rtx,
6cde4876 5714 FIRST_PSEUDO_REGISTER * sizeof (rtx));
e50126e8 5715
23b2ce53 5716 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
bd60bab2 5717 init_virtual_regs ();
740ab4a2
RK
5718
5719 /* Indicate that the virtual registers and stack locations are
5720 all pointers. */
3502dc9c
JDA
5721 REG_POINTER (stack_pointer_rtx) = 1;
5722 REG_POINTER (frame_pointer_rtx) = 1;
5723 REG_POINTER (hard_frame_pointer_rtx) = 1;
5724 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 5725
3502dc9c
JDA
5726 REG_POINTER (virtual_incoming_args_rtx) = 1;
5727 REG_POINTER (virtual_stack_vars_rtx) = 1;
5728 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5729 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5730 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 5731
86fe05e0 5732#ifdef STACK_BOUNDARY
bdb429a5
RK
5733 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5736 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5737
5738 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5739 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5740 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5741 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5742 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
5743#endif
5744
5e82e7bd
JVA
5745#ifdef INIT_EXPANDERS
5746 INIT_EXPANDERS;
5747#endif
23b2ce53
RS
5748}
5749
a73b091d 5750/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5751
5752static rtx
a73b091d 5753gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5754{
5755 rtx tem;
5756 rtvec v;
5757 int units, i;
5758 enum machine_mode inner;
5759
5760 units = GET_MODE_NUNITS (mode);
5761 inner = GET_MODE_INNER (mode);
5762
15ed7b52
JG
5763 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5764
69ef87e2
AH
5765 v = rtvec_alloc (units);
5766
a73b091d
JW
5767 /* We need to call this function after we set the scalar const_tiny_rtx
5768 entries. */
5769 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5770
5771 for (i = 0; i < units; ++i)
a73b091d 5772 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5773
a06e3c40 5774 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5775 return tem;
5776}
5777
a06e3c40 5778/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
a73b091d 5779 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5780rtx
502b8322 5781gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5782{
a73b091d
JW
5783 enum machine_mode inner = GET_MODE_INNER (mode);
5784 int nunits = GET_MODE_NUNITS (mode);
5785 rtx x;
a06e3c40
R
5786 int i;
5787
a73b091d
JW
5788 /* Check to see if all of the elements have the same value. */
5789 x = RTVEC_ELT (v, nunits - 1);
5790 for (i = nunits - 2; i >= 0; i--)
5791 if (RTVEC_ELT (v, i) != x)
5792 break;
5793
5794 /* If the values are all the same, check to see if we can use one of the
5795 standard constant vectors. */
5796 if (i == -1)
5797 {
5798 if (x == CONST0_RTX (inner))
5799 return CONST0_RTX (mode);
5800 else if (x == CONST1_RTX (inner))
5801 return CONST1_RTX (mode);
e7c82a99
JJ
5802 else if (x == CONSTM1_RTX (inner))
5803 return CONSTM1_RTX (mode);
a73b091d
JW
5804 }
5805
5806 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5807}
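
/* An illustrative sketch (hypothetical, assuming the target provides
   V4SImode): because every element below is CONST0_RTX (SImode),
   gen_rtx_CONST_VECTOR returns the shared CONST0_RTX (V4SImode)
   rather than allocating a fresh CONST_VECTOR.  */
static rtx
sketch_zero_v4si (void)
{
  rtvec v = rtvec_alloc (4);
  int i;

  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (SImode);
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}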
5808
b5deb7b6
SL
5809/* Initialize global register information required by all functions. */
5810
5811void
5812init_emit_regs (void)
5813{
5814 int i;
1c3f523e
RS
5815 enum machine_mode mode;
5816 mem_attrs *attrs;
b5deb7b6
SL
5817
5818 /* Reset register attributes */
5819 htab_empty (reg_attrs_htab);
5820
5821 /* We need reg_raw_mode, so initialize the modes now. */
5822 init_reg_modes_target ();
5823
5824 /* Assign register numbers to the globally defined register rtx. */
b5deb7b6
SL
5825 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5826 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5827 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5828 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5829 virtual_incoming_args_rtx =
5830 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5831 virtual_stack_vars_rtx =
5832 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5833 virtual_stack_dynamic_rtx =
5834 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5835 virtual_outgoing_args_rtx =
5836 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5837 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
32990d5b
JJ
5838 virtual_preferred_stack_boundary_rtx =
5839 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
b5deb7b6
SL
5840
5841 /* Initialize RTL for commonly used hard registers. These are
5842 copied into regno_reg_rtx as we begin to compile each function. */
5843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5fb0e246 5844 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
b5deb7b6
SL
5845
5846#ifdef RETURN_ADDRESS_POINTER_REGNUM
5847 return_address_pointer_rtx
5848 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5849#endif
5850
b5deb7b6
SL
5851 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5852 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5853 else
5854 pic_offset_table_rtx = NULL_RTX;
1c3f523e
RS
5855
5856 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5857 {
5858 mode = (enum machine_mode) i;
766090c2 5859 attrs = ggc_cleared_alloc<mem_attrs> ();
1c3f523e
RS
5860 attrs->align = BITS_PER_UNIT;
5861 attrs->addrspace = ADDR_SPACE_GENERIC;
5862 if (mode != BLKmode)
5863 {
754c3d5d
RS
5864 attrs->size_known_p = true;
5865 attrs->size = GET_MODE_SIZE (mode);
1c3f523e
RS
5866 if (STRICT_ALIGNMENT)
5867 attrs->align = GET_MODE_ALIGNMENT (mode);
5868 }
5869 mode_mem_attrs[i] = attrs;
5870 }
b5deb7b6
SL
5871}
5872
aa3a12d6
RS
5873/* Initialize global machine_mode variables. */
5874
5875void
5876init_derived_machine_modes (void)
5877{
5878 byte_mode = VOIDmode;
5879 word_mode = VOIDmode;
5880
5881 for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5882 mode != VOIDmode;
5883 mode = GET_MODE_WIDER_MODE (mode))
5884 {
5885 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5886 && byte_mode == VOIDmode)
5887 byte_mode = mode;
5888
5889 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5890 && word_mode == VOIDmode)
5891 word_mode = mode;
5892 }
5893
5894 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5895}
5896
2d888286 5897/* Create some permanent unique rtl objects shared between all functions. */
23b2ce53
RS
5898
5899void
2d888286 5900init_emit_once (void)
23b2ce53
RS
5901{
5902 int i;
5903 enum machine_mode mode;
9ec36da5 5904 enum machine_mode double_mode;
23b2ce53 5905
807e902e
KZ
5906 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5907 CONST_FIXED, and memory attribute hash tables. */
17211ab5
GK
5908 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5909 const_int_htab_eq, NULL);
173b24b9 5910
807e902e
KZ
5911#if TARGET_SUPPORTS_WIDE_INT
5912 const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
5913 const_wide_int_htab_eq, NULL);
5914#endif
17211ab5
GK
5915 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5916 const_double_htab_eq, NULL);
5692c7bc 5917
091a3ac7
CF
5918 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5919 const_fixed_htab_eq, NULL);
5920
a560d4d4
JH
5921 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5922 reg_attrs_htab_eq, NULL);
67673f5c 5923
5da077de 5924#ifdef INIT_EXPANDERS
414c4dc4
NC
5925 /* This is to initialize {init|mark|free}_machine_status before the first
5926 call to push_function_context_to. This is needed by the Chill front
a1f300c0 5927 end which calls push_function_context_to before the first call to
5da077de
AS
5928 init_function_start. */
5929 INIT_EXPANDERS;
5930#endif
5931
23b2ce53
RS
5932 /* Create the unique rtx's for certain rtx codes and operand values. */
5933
a2a8cc44 5934 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
c5c76735 5935 tries to use these variables. */
23b2ce53 5936 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
750c9258 5937 const_int_rtx[i + MAX_SAVED_CONST_INT] =
f1b690f1 5938 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
23b2ce53 5939
68d75312
JC
5940 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5941 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5da077de 5942 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
68d75312 5943 else
3b80f6ca 5944 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
23b2ce53 5945
aa3a12d6
RS
5946 double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5947
807e902e
KZ
5948 real_from_integer (&dconst0, double_mode, 0, SIGNED);
5949 real_from_integer (&dconst1, double_mode, 1, SIGNED);
5950 real_from_integer (&dconst2, double_mode, 2, SIGNED);
aefa9d43
KG
5951
5952 dconstm1 = dconst1;
5953 dconstm1.sign = 1;
03f2ea93
RS
5954
5955 dconsthalf = dconst1;
1e92bbb9 5956 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
23b2ce53 5957
e7c82a99 5958 for (i = 0; i < 3; i++)
23b2ce53 5959 {
aefa9d43 5960 const REAL_VALUE_TYPE *const r =
b216cd4a
ZW
5961 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5962
15ed7b52
JG
5963 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5964 mode != VOIDmode;
5965 mode = GET_MODE_WIDER_MODE (mode))
5966 const_tiny_rtx[i][(int) mode] =
5967 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5968
5969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5970 mode != VOIDmode;
23b2ce53 5971 mode = GET_MODE_WIDER_MODE (mode))
5692c7bc
ZW
5972 const_tiny_rtx[i][(int) mode] =
5973 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
23b2ce53 5974
906c4e36 5975 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
23b2ce53 5976
15ed7b52
JG
5977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5978 mode != VOIDmode;
23b2ce53 5979 mode = GET_MODE_WIDER_MODE (mode))
906c4e36 5980 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
33d3e559 5981
ede6c734
MS
5982 for (mode = MIN_MODE_PARTIAL_INT;
5983 mode <= MAX_MODE_PARTIAL_INT;
5984 mode = (enum machine_mode)((int)(mode) + 1))
33d3e559 5985 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
23b2ce53
RS
5986 }
5987
e7c82a99
JJ
5988 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5989
5990 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5991 mode != VOIDmode;
5992 mode = GET_MODE_WIDER_MODE (mode))
5993 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5994
ede6c734
MS
5995 for (mode = MIN_MODE_PARTIAL_INT;
5996 mode <= MAX_MODE_PARTIAL_INT;
5997 mode = (enum machine_mode)((int)(mode) + 1))
c8a89d2a
BS
5998 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5999
e90721b1
AP
6000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6001 mode != VOIDmode;
6002 mode = GET_MODE_WIDER_MODE (mode))
6003 {
6004 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6005 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6006 }
6007
6008 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6009 mode != VOIDmode;
6010 mode = GET_MODE_WIDER_MODE (mode))
6011 {
6012 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6013 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6014 }
6015
69ef87e2
AH
6016 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6017 mode != VOIDmode;
6018 mode = GET_MODE_WIDER_MODE (mode))
a73b091d
JW
6019 {
6020 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6021 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
e7c82a99 6022 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
a73b091d 6023 }
69ef87e2
AH
6024
6025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6026 mode != VOIDmode;
6027 mode = GET_MODE_WIDER_MODE (mode))
a73b091d
JW
6028 {
6029 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6030 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6031 }
69ef87e2 6032
325217ed
CF
6033 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6034 mode != VOIDmode;
6035 mode = GET_MODE_WIDER_MODE (mode))
6036 {
c3284718
RS
6037 FCONST0 (mode).data.high = 0;
6038 FCONST0 (mode).data.low = 0;
6039 FCONST0 (mode).mode = mode;
091a3ac7
CF
6040 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6041 FCONST0 (mode), mode);
325217ed
CF
6042 }
6043
6044 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6045 mode != VOIDmode;
6046 mode = GET_MODE_WIDER_MODE (mode))
6047 {
c3284718
RS
6048 FCONST0 (mode).data.high = 0;
6049 FCONST0 (mode).data.low = 0;
6050 FCONST0 (mode).mode = mode;
091a3ac7
CF
6051 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6052 FCONST0 (mode), mode);
325217ed
CF
6053 }
6054
6055 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6056 mode != VOIDmode;
6057 mode = GET_MODE_WIDER_MODE (mode))
6058 {
c3284718
RS
6059 FCONST0 (mode).data.high = 0;
6060 FCONST0 (mode).data.low = 0;
6061 FCONST0 (mode).mode = mode;
091a3ac7
CF
6062 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6063 FCONST0 (mode), mode);
325217ed
CF
6064
6065 /* We store the value 1. */
c3284718
RS
6066 FCONST1 (mode).data.high = 0;
6067 FCONST1 (mode).data.low = 0;
6068 FCONST1 (mode).mode = mode;
6069 FCONST1 (mode).data
9be0ac8c
LC
6070 = double_int_one.lshift (GET_MODE_FBIT (mode),
6071 HOST_BITS_PER_DOUBLE_INT,
6072 SIGNED_FIXED_POINT_MODE_P (mode));
091a3ac7
CF
6073 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6074 FCONST1 (mode), mode);
325217ed
CF
6075 }
6076
6077 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6078 mode != VOIDmode;
6079 mode = GET_MODE_WIDER_MODE (mode))
6080 {
c3284718
RS
6081 FCONST0 (mode).data.high = 0;
6082 FCONST0 (mode).data.low = 0;
6083 FCONST0 (mode).mode = mode;
091a3ac7
CF
6084 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6085 FCONST0 (mode), mode);
325217ed
CF
6086
6087 /* We store the value 1. */
c3284718
RS
6088 FCONST1 (mode).data.high = 0;
6089 FCONST1 (mode).data.low = 0;
6090 FCONST1 (mode).mode = mode;
6091 FCONST1 (mode).data
9be0ac8c
LC
6092 = double_int_one.lshift (GET_MODE_FBIT (mode),
6093 HOST_BITS_PER_DOUBLE_INT,
6094 SIGNED_FIXED_POINT_MODE_P (mode));
091a3ac7
CF
6095 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6096 FCONST1 (mode), mode);
6097 }
6098
6099 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6100 mode != VOIDmode;
6101 mode = GET_MODE_WIDER_MODE (mode))
6102 {
6103 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6104 }
6105
6106 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6107 mode != VOIDmode;
6108 mode = GET_MODE_WIDER_MODE (mode))
6109 {
6110 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6111 }
6112
6113 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6114 mode != VOIDmode;
6115 mode = GET_MODE_WIDER_MODE (mode))
6116 {
6117 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6118 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6119 }
6120
6121 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6122 mode != VOIDmode;
6123 mode = GET_MODE_WIDER_MODE (mode))
6124 {
6125 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6126 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
325217ed
CF
6127 }
6128
dbbbbf3b
JDA
6129 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6130 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
6131 const_tiny_rtx[0][i] = const0_rtx;
23b2ce53 6132
f0417c82
RH
6133 const_tiny_rtx[0][(int) BImode] = const0_rtx;
6134 if (STORE_FLAG_VALUE == 1)
6135 const_tiny_rtx[1][(int) BImode] = const1_rtx;
ca4adc91
RS
6136
6137 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6138 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6139 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6140 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
23b2ce53 6141}
a11759a3 6142\f
969d70ca
JH
6143/* Produce an exact duplicate of insn INSN after AFTER,
6144 taking care to update libcall regions if present. */
6145
cd459bf8 6146rtx_insn *
a1950df3 6147emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
969d70ca 6148{
cd459bf8
DM
6149 rtx_insn *new_rtx;
6150 rtx link;
969d70ca
JH
6151
6152 switch (GET_CODE (insn))
6153 {
6154 case INSN:
60564289 6155 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca
JH
6156 break;
6157
6158 case JUMP_INSN:
60564289 6159 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
ec27069c 6160 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
969d70ca
JH
6161 break;
6162
b5b8b0ac
AO
6163 case DEBUG_INSN:
6164 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6165 break;
6166
969d70ca 6167 case CALL_INSN:
60564289 6168 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca 6169 if (CALL_INSN_FUNCTION_USAGE (insn))
60564289 6170 CALL_INSN_FUNCTION_USAGE (new_rtx)
969d70ca 6171 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
60564289
KG
6172 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6173 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6174 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
b8698a0f 6175 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
becfd6e5 6176 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
969d70ca
JH
6177 break;
6178
6179 default:
5b0264cb 6180 gcc_unreachable ();
969d70ca
JH
6181 }
6182
6183 /* Update LABEL_NUSES. */
60564289 6184 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
969d70ca 6185
5368224f 6186 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
ba4f7968 6187
0a3d71f5
JW
6188 /* If the old insn is frame related, then so is the new one. This is
6189 primarily needed for IA-64 unwind info which marks epilogue insns,
6190 which may be duplicated by the basic block reordering code. */
60564289 6191 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
0a3d71f5 6192
cf7c4aa6
HPN
6193 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6194 will make them. REG_LABEL_TARGETs are created there too, but are
6195 supposed to be sticky, so we copy them. */
969d70ca 6196 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cf7c4aa6 6197 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
969d70ca
JH
6198 {
6199 if (GET_CODE (link) == EXPR_LIST)
60564289 6200 add_reg_note (new_rtx, REG_NOTE_KIND (link),
65c5f2a6 6201 copy_insn_1 (XEXP (link, 0)));
969d70ca 6202 else
e5af9ddd 6203 add_shallow_copy_of_reg_note (new_rtx, link);
969d70ca
JH
6204 }
6205
60564289
KG
6206 INSN_CODE (new_rtx) = INSN_CODE (insn);
6207 return new_rtx;
969d70ca 6208}
e2500fed 6209
1431042e 6210static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d
JH
6211rtx
6212gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6213{
6214 if (hard_reg_clobbers[mode][regno])
6215 return hard_reg_clobbers[mode][regno];
6216 else
6217 return (hard_reg_clobbers[mode][regno] =
6218 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6219}
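
/* An illustrative sketch (hypothetical; register number 0 stands in
   for a real hard register): repeated requests return the same
   cached rtx, so the clobbers can be compared by pointer.  */
static void
sketch_shared_clobber (void)
{
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (c1 == c2);
}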
6220
5368224f
DC
6221location_t prologue_location;
6222location_t epilogue_location;
78bde837
SB
6223
6224/* Hold the current location information, so that the
6225 data structures are built lazily only when instructions at a given
6226 place are needed. */
3a50da34 6227static location_t curr_location;
78bde837 6228
5368224f 6229/* Initialize the insn location data structures. */
78bde837 6230void
5368224f 6231insn_locations_init (void)
78bde837 6232{
5368224f 6233 prologue_location = epilogue_location = 0;
78bde837 6234 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6235}
6236
6237/* At the end of emit stage, clear current location. */
6238void
5368224f 6239insn_locations_finalize (void)
78bde837 6240{
5368224f
DC
6241 epilogue_location = curr_location;
6242 curr_location = UNKNOWN_LOCATION;
78bde837
SB
6243}
6244
6245/* Set current location. */
6246void
5368224f 6247set_curr_insn_location (location_t location)
78bde837 6248{
78bde837
SB
6249 curr_location = location;
6250}
6251
6252/* Get current location. */
6253location_t
5368224f 6254curr_insn_location (void)
78bde837
SB
6255{
6256 return curr_location;
6257}
6258
78bde837
SB
6259/* Return the lexical scope block that INSN belongs to. */
6260tree
a1950df3 6261insn_scope (const rtx_insn *insn)
78bde837 6262{
5368224f 6263 return LOCATION_BLOCK (INSN_LOCATION (insn));
78bde837
SB
6264}
6265
6266/* Return line number of the statement that produced this insn. */
6267int
a1950df3 6268insn_line (const rtx_insn *insn)
78bde837 6269{
5368224f 6270 return LOCATION_LINE (INSN_LOCATION (insn));
78bde837
SB
6271}
6272
6273/* Return source file of the statement that produced this insn. */
6274const char *
a1950df3 6275insn_file (const rtx_insn *insn)
78bde837 6276{
5368224f 6277 return LOCATION_FILE (INSN_LOCATION (insn));
78bde837 6278}
8930883e 6279
ffa4602f
EB
6280/* Return expanded location of the statement that produced this insn. */
6281expanded_location
a1950df3 6282insn_location (const rtx_insn *insn)
ffa4602f
EB
6283{
6284 return expand_location (INSN_LOCATION (insn));
6285}
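
/* An illustrative sketch (hypothetical helper): format a "file:line"
   prefix for a diagnostic about INSN, assuming INSN carries a known
   location.  */
static void
sketch_print_insn_location (const rtx_insn *insn)
{
  fprintf (stderr, "%s:%d: suspicious insn\n",
	   insn_file (insn), insn_line (insn));
}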
6286
8930883e
MK
6287/* Return true if memory model MODEL requires a pre-operation (release-style)
6288 barrier or a post-operation (acquire-style) barrier. While not universal,
6289 this function matches behavior of several targets. */
6290
6291bool
6292need_atomic_barrier_p (enum memmodel model, bool pre)
6293{
88e784e6 6294 switch (model & MEMMODEL_MASK)
8930883e
MK
6295 {
6296 case MEMMODEL_RELAXED:
6297 case MEMMODEL_CONSUME:
6298 return false;
6299 case MEMMODEL_RELEASE:
6300 return pre;
6301 case MEMMODEL_ACQUIRE:
6302 return !pre;
6303 case MEMMODEL_ACQ_REL:
6304 case MEMMODEL_SEQ_CST:
6305 return true;
6306 default:
6307 gcc_unreachable ();
6308 }
6309}
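
/* An illustrative sketch (hypothetical helper): a full fence is
   needed only when both the pre- and the post-operation barrier are
   required, which is the case for MEMMODEL_ACQ_REL and
   MEMMODEL_SEQ_CST.  */
static bool
sketch_needs_full_fence (enum memmodel model)
{
  return (need_atomic_barrier_p (model, true)
	  && need_atomic_barrier_p (model, false));
}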
6310\f
e2500fed 6311#include "gt-emit-rtl.h"