/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
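
/* For illustration only (not used anywhere in this file): a caller
   might build the pattern (set (reg:SI 60) (plus:SI (reg:SI 60)
   (const_int 4))) like so, where pseudo 60 and SImode are
   hypothetical placeholders:

     rtx reg = gen_rtx_REG (SImode, 60);
     rtx sum = gen_rtx_PLUS (SImode, reg, GEN_INT (4));
     rtx set = gen_rtx_SET (VOIDmode, reg, sum);

   gen_rtx_PLUS and gen_rtx_SET are among the generated wrappers
   around gen_rtx_fmt_* mentioned above.  */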
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
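
/* Because CONST_INTs are shared, pointer equality is value equality
   for them.  For illustration only (not used by this file):

     gen_rtx_CONST_INT (VOIDmode, 0) == const0_rtx
     GEN_INT (1000) == GEN_INT (1000)

   both hold, since small values come straight from const_int_rtx[]
   and larger ones are canonicalized through const_int_htab.  */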
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
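
/* A worked example (illustrative only): on a target where QImode is
   8 bits, gen_int_mode (0xff, QImode) first truncates 0xff to the
   sign-extended value -1 and therefore returns constm1_rtx, whereas
   GEN_INT (0xff) would produce a CONST_INT that is out of range for
   QImode.  */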
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
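
/* A worked example of the three cases (illustrative only, assuming a
   64-bit HOST_WIDE_INT):

     immed_double_const (5, 0, SImode)    => gen_int_mode, case 1
     immed_double_const (5, 0, VOIDmode)  => (const_int 5), case 2
     immed_double_const (0, 1, VOIDmode)  => CONST_DOUBLE, case 3

   In case 3 the value is 2**64, which does not fit in one
   HOST_WIDE_INT, so a VOIDmode CONST_DOUBLE carrying both words is
   built and shared through const_double_htab.  */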
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
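
/* For illustration only: because of the sharing above,

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   holds outside of reload/LRA, while a non-Pmode request for the same
   hard register yields a fresh REG from gen_raw_REG.  */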
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
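
/* Illustrative sketch (not part of this file's logic): a constant-pool
   style reference could be built with

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, sym);

   where the name "*.LC0" is a hypothetical local constant label;
   gen_const_mem marks the MEM read-only and non-trapping as above.  */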
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs when a register
		must be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
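
/* Some concrete instances of the rules above (illustrative only,
   assuming a 32-bit little-endian target with 4-byte words):

     (subreg:SI (reg:DI P) 0)   valid: lowpart word of the register
     (subreg:SI (reg:DI P) 4)   valid: the high word, itself a word lowpart
     (subreg:QI (reg:DI P) 2)   invalid: not the lowpart of any word
     (subreg:HI (reg:DF P) 0)   invalid: float-involving size change

   where P stands for some hypothetical pseudo register number.  */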
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
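
/* For example (illustrative only), on a 32-bit big-endian target,
   byte_lowpart_offset (HImode, SImode) is 2 -- the low half-word of a
   stored SImode value starts 2 bytes in -- while on little-endian it
   is 0; the paradoxical direction flips the sign.  */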
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
1442}
68252e27 1443
38ae7651 1444/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
8698cce3 1445
e0e08ac2 1446unsigned int
502b8322 1447subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1448{
1449 unsigned int offset = 0;
1450 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1451
e0e08ac2 1452 if (difference > 0)
ccba022b 1453 {
e0e08ac2
JH
1454 if (WORDS_BIG_ENDIAN)
1455 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1456 if (BYTES_BIG_ENDIAN)
1457 offset += difference % UNITS_PER_WORD;
ccba022b 1458 }
ddef6bc7 1459
e0e08ac2 1460 return offset;
ccba022b 1461}
eea50aa0 1462
e0e08ac2
JH
1463/* Return offset in bytes to get OUTERMODE high part
1464 of the value in mode INNERMODE stored in memory in target format. */
1465unsigned int
502b8322 1466subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1467{
1468 unsigned int offset = 0;
1469 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1470
5b0264cb 1471 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
e0e08ac2 1472
eea50aa0
JH
1473 if (difference > 0)
1474 {
e0e08ac2 1475 if (! WORDS_BIG_ENDIAN)
eea50aa0 1476 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1477 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1478 offset += difference % UNITS_PER_WORD;
1479 }
1480
e0e08ac2 1481 return offset;
eea50aa0 1482}
ccba022b 1483
23b2ce53
RS
1484/* Return 1 iff X, assumed to be a SUBREG,
1485 refers to the least significant part of its containing reg.
1486 If X is not a SUBREG, always return 1 (it is its own low part!). */
1487
1488int
fa233e34 1489subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1490{
1491 if (GET_CODE (x) != SUBREG)
1492 return 1;
a3a03040
RK
1493 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1494 return 0;
23b2ce53 1495
e0e08ac2
JH
1496 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1497 == SUBREG_BYTE (x));
23b2ce53 1498}
6a4bdc79
BS
1499
1500/* Return true if X is a paradoxical subreg, false otherwise. */
1501bool
1502paradoxical_subreg_p (const_rtx x)
1503{
1504 if (GET_CODE (x) != SUBREG)
1505 return false;
1506 return (GET_MODE_PRECISION (GET_MODE (x))
1507 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1508}
23b2ce53 1509\f
ddef6bc7
JJ
1510/* Return subword OFFSET of operand OP.
1511 The word number, OFFSET, is interpreted as the word number starting
1512 at the low-order address. OFFSET 0 is the low-order word if not
1513 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1514
1515 If we cannot extract the required word, we return zero. Otherwise,
1516 an rtx corresponding to the requested word will be returned.
1517
1518 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1519 reload has completed, a valid address will always be returned. After
1520 reload, if a valid address cannot be returned, we return zero.
1521
1522 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1523 it is the responsibility of the caller.
1524
1525 MODE is the mode of OP in case it is a CONST_INT.
1526
1527 ??? This is still rather broken for some cases. The problem for the
1528 moment is that all callers of this thing provide no 'goal mode' to
1529 tell us to work with. This exists because all callers were written
0631e0bf
JH
1530 in a word based SUBREG world.
1531 Now use of this function can be deprecated by simplify_subreg in most
1532 cases.
1533 */
ddef6bc7
JJ
1534
1535rtx
502b8322 1536operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
ddef6bc7
JJ
1537{
1538 if (mode == VOIDmode)
1539 mode = GET_MODE (op);
1540
5b0264cb 1541 gcc_assert (mode != VOIDmode);
ddef6bc7 1542
30f7a378 1543 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1544 if (mode != BLKmode
1545 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1546 return 0;
1547
30f7a378 1548 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1549 if (mode != BLKmode
1550 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1551 return const0_rtx;
1552
ddef6bc7 1553 /* Form a new MEM at the requested address. */
3c0cb5de 1554 if (MEM_P (op))
ddef6bc7 1555 {
60564289 1556 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1557
f1ec5147 1558 if (! validate_address)
60564289 1559 return new_rtx;
f1ec5147
RK
1560
1561 else if (reload_completed)
ddef6bc7 1562 {
09e881c9
BE
1563 if (! strict_memory_address_addr_space_p (word_mode,
1564 XEXP (new_rtx, 0),
1565 MEM_ADDR_SPACE (op)))
f1ec5147 1566 return 0;
ddef6bc7 1567 }
f1ec5147 1568 else
60564289 1569 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
ddef6bc7
JJ
1570 }
1571
0631e0bf
JH
1572 /* Rest can be handled by simplify_subreg. */
1573 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1574}
1575
535a42b1
NS
1576/* Similar to `operand_subword', but never return 0. If we can't
1577 extract the required subword, put OP into a register and try again.
1578 The second attempt must succeed. We always validate the address in
1579 this case.
23b2ce53
RS
1580
1581 MODE is the mode of OP, in case it is CONST_INT. */
1582
1583rtx
502b8322 1584operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1585{
ddef6bc7 1586 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1587
1588 if (result)
1589 return result;
1590
1591 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1592 {
1593 /* If this is a register which can not be accessed by words, copy it
1594 to a pseudo register. */
f8cfc6aa 1595 if (REG_P (op))
77e6b0eb
JC
1596 op = copy_to_reg (op);
1597 else
1598 op = force_reg (mode, op);
1599 }
23b2ce53 1600
ddef6bc7 1601 result = operand_subword (op, offset, 1, mode);
5b0264cb 1602 gcc_assert (result);
23b2ce53
RS
1603
1604 return result;
1605}
1606\f
2b3493c8
AK
1607/* Returns 1 if both MEM_EXPR can be considered equal
1608 and 0 otherwise. */
1609
1610int
4f588890 1611mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1612{
1613 if (expr1 == expr2)
1614 return 1;
1615
1616 if (! expr1 || ! expr2)
1617 return 0;
1618
1619 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1620 return 0;
1621
55b34b5f 1622 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1623}
1624
805903b5
JJ
1625/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1626 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1627 -1 if not known. */
1628
1629int
d9223014 1630get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1631{
1632 tree expr;
1633 unsigned HOST_WIDE_INT offset;
1634
1635 /* This function can't use
527210c4 1636 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1637 || (MAX (MEM_ALIGN (mem),
0eb77834 1638 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1639 < align))
1640 return -1;
1641 else
527210c4 1642 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1643 for two reasons:
1644 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1645 for <variable>. get_inner_reference doesn't handle it and
1646 even if it did, the alignment in that case needs to be determined
1647 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1648 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1649 isn't sufficiently aligned, the object it is in might be. */
1650 gcc_assert (MEM_P (mem));
1651 expr = MEM_EXPR (mem);
527210c4 1652 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1653 return -1;
1654
527210c4 1655 offset = MEM_OFFSET (mem);
805903b5
JJ
1656 if (DECL_P (expr))
1657 {
1658 if (DECL_ALIGN (expr) < align)
1659 return -1;
1660 }
1661 else if (INDIRECT_REF_P (expr))
1662 {
1663 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1664 return -1;
1665 }
1666 else if (TREE_CODE (expr) == COMPONENT_REF)
1667 {
1668 while (1)
1669 {
1670 tree inner = TREE_OPERAND (expr, 0);
1671 tree field = TREE_OPERAND (expr, 1);
1672 tree byte_offset = component_ref_field_offset (expr);
1673 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1674
1675 if (!byte_offset
cc269bb6
RS
1676 || !tree_fits_uhwi_p (byte_offset)
1677 || !tree_fits_uhwi_p (bit_offset))
805903b5
JJ
1678 return -1;
1679
ae7e9ddd
RS
1680 offset += tree_to_uhwi (byte_offset);
1681 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
805903b5
JJ
1682
1683 if (inner == NULL_TREE)
1684 {
1685 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1686 < (unsigned int) align)
1687 return -1;
1688 break;
1689 }
1690 else if (DECL_P (inner))
1691 {
1692 if (DECL_ALIGN (inner) < align)
1693 return -1;
1694 break;
1695 }
1696 else if (TREE_CODE (inner) != COMPONENT_REF)
1697 return -1;
1698 expr = inner;
1699 }
1700 }
1701 else
1702 return -1;
1703
1704 return offset & ((align / BITS_PER_UNIT) - 1);
1705}
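
/* Illustrative sketch (an assumption for this example, not code from
   this file): probing a MEM's alignment relative to a 64-bit boundary
   before choosing between aligned and unaligned access sequences:

     int misalign = get_mem_align_offset (mem, 64);

   A result of -1 means the alignment is unknown, 0 means XEXP (mem, 0)
   is known to be 64-bit aligned, and any other value is the byte
   offset from the last 64-bit boundary.  */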

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (tree_fits_uhwi_p (off_tree))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_to_uhwi (off_tree);
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
	obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
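
/* Illustrative sketch (an assumption for this example, not code from
   this file): the typical expander pairing when an expression EXP is
   turned into a MEM.  gen_rtx_MEM yields a bare MEM with no
   attributes; set_mem_attributes then derives the alias set,
   alignment, size and MEM_EXPR from the tree:

     rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
     set_mem_attributes (mem, exp, 0);  */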

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
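
/* Illustrative sketch (an assumption for this example): the setters
   above all follow the same copy-modify-install pattern over the
   shared mem_attrs structure, so they may be chained freely.  A pass
   that has proven a stricter alignment and an exact size for MEM
   might write:

     set_mem_align (mem, 64);
     set_mem_size (mem, 16);

   Each call rewrites only the named attribute; the rest of the
   attribute block is preserved.  */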
\f
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate the address for LRA.  LRA can make the address
     valid by itself in the most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
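
/* Illustrative sketch (an assumption for this example): re-expressing
   a BLKmode MEM as a word-mode access at the same address, e.g. before
   emitting a block move one word at a time:

     rtx word = change_address (mem, word_mode, NULL_RTX);

   Passing NULL_RTX keeps the address; only the mode and the
   mode-derived attributes change.  */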

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust_address, int adjust_object,
		  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && trunc_int_for_mode (offset, pointer_mode) == offset)
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
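
/* Illustrative sketch (an assumption for this example): the usual
   entry points are the adjust_address and adjust_address_nv macros,
   which wrap this function.  Accessing the upper SImode half of a
   DImode MEM on a 32-bit target might look like:

     rtx hi = adjust_address (mem, SImode, 4);

   The offset both rewrites the address and updates MEM_OFFSET,
   MEM_SIZE and MEM_ALIGN, so the alias machinery keeps a precise view
   of the access.  */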

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && attrs.offset >= 0)
	    break;

	  if (! tree_fits_uhwi_p (offset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += tree_to_uhwi (offset);
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
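
/* Illustrative sketch (an assumption for this example): a target that
   can only do word loads might widen a byte access before masking:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The result reads the whole word containing the byte, so its
   MEM_EXPR and alias set are conservatively weakened, as the code
   above shows.  */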
\f
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
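
/* Illustrative sketch (an assumption for this example): a label
   becomes part of the insn stream only once it is emitted:

     rtx_code_label *label = gen_label_rtx ();
     emit_label (label);

   Jumps that target LABEL must keep JUMP_LABEL and LABEL_NUSES up to
   date; gen_label_rtx itself only allocates the CODE_LABEL.  */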
\f
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
		      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}


/* Check that ORIG is not marked when it should not be, and mark ORIG
   as in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCHes must be shared because each represents a distinct
	 value.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that
	 originated as pseudos.  This is needed to allow safe register
	 renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside a single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}

/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      {
		rtx insn = XVECEXP (pat, 0, i);
		if (INSN_P (insn))
		  reset_insn_used_flags (insn);
	      }
	  }
      }
}

/* Verify sharing in INSN.  (The original body mistakenly called
   reset_used_flags here, which made the verification a no-op; it must
   recurse with verify_rtx_sharing to actually check the USED bits.)  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no
   unexpected sharing between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx insn = XVECEXP (pat, 0, i);
	      if (INSN_P (insn))
		verify_insn_sharing (insn);
	    }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCHes must be shared because each represents a distinct
	 value.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that
	 originated as pseudos.  This is needed to allow safe register
	 renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
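
/* Illustrative sketch (an assumption for this example): the intended
   protocol is a reset pass followed by a copy pass, as
   unshare_all_rtl_again does above:

     reset_used_flags (pat);
     pat = copy_rtx_if_shared (pat);

   Calling copy_rtx_if_shared without first clearing the USED bits
   would copy pieces that are not actually shared.  */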

/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
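
/* Illustrative sketch (an assumption for this example; OP0, OP1 and
   TARGET are hypothetical operands): protecting an input before
   clobbering a possibly-overlapping destination:

     op0 = make_safe_from (op0, target);
     emit_move_insn (target, op1);

   If storing into TARGET could alter OP0, OP0 now refers to a fresh
   pseudo holding the original value.  */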
\f
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in the current sequence or
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
	}
    }

  return insn;
}

/* Return the last nonnote insn emitted in the current sequence or
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn))
	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	      insn = seq->insn (seq->len () - 1);
	}
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
3227
23b2ce53
RS
3228\f
3229/* Return the next insn. If it is a SEQUENCE, return the first insn
3230 of the sequence. */
3231
eb51c837 3232rtx_insn *
4ce524a1 3233next_insn (rtx_insn *insn)
23b2ce53 3234{
75547801
KG
3235 if (insn)
3236 {
3237 insn = NEXT_INSN (insn);
3238 if (insn && NONJUMP_INSN_P (insn)
3239 && GET_CODE (PATTERN (insn)) == SEQUENCE)
dc01c3d1 3240 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
75547801 3241 }
23b2ce53 3242
dc01c3d1 3243 return insn;
23b2ce53
RS
3244}
3245
3246/* Return the previous insn. If it is a SEQUENCE, return the last insn
3247 of the sequence. */
3248
eb51c837 3249rtx_insn *
4ce524a1 3250previous_insn (rtx_insn *insn)
23b2ce53 3251{
75547801
KG
3252 if (insn)
3253 {
3254 insn = PREV_INSN (insn);
dc01c3d1
DM
3255 if (insn && NONJUMP_INSN_P (insn))
3256 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3257 insn = seq->insn (seq->len () - 1);
75547801 3258 }
23b2ce53 3259
dc01c3d1 3260 return insn;
23b2ce53
RS
3261}
3262
3263/* Return the next insn after INSN that is not a NOTE. This routine does not
3264 look inside SEQUENCEs. */
3265
eb51c837 3266rtx_insn *
dc01c3d1 3267next_nonnote_insn (rtx uncast_insn)
23b2ce53 3268{
dc01c3d1 3269 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
75547801
KG
3270 while (insn)
3271 {
3272 insn = NEXT_INSN (insn);
3273 if (insn == 0 || !NOTE_P (insn))
3274 break;
3275 }
23b2ce53 3276
dc01c3d1 3277 return insn;
23b2ce53
RS
3278}
3279
1e211590
DD
3280/* Return the next insn after INSN that is not a NOTE, but stop the
3281 search before we enter another basic block. This routine does not
3282 look inside SEQUENCEs. */
3283
eb51c837 3284rtx_insn *
e4685bc8 3285next_nonnote_insn_bb (rtx_insn *insn)
1e211590
DD
3286{
3287 while (insn)
3288 {
3289 insn = NEXT_INSN (insn);
3290 if (insn == 0 || !NOTE_P (insn))
3291 break;
3292 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3293 return NULL;
1e211590
DD
3294 }
3295
dc01c3d1 3296 return insn;
1e211590
DD
3297}
3298
23b2ce53
RS
3299/* Return the previous insn before INSN that is not a NOTE. This routine does
3300 not look inside SEQUENCEs. */
3301
eb51c837 3302rtx_insn *
dc01c3d1 3303prev_nonnote_insn (rtx uncast_insn)
23b2ce53 3304{
dc01c3d1
DM
3305 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3306
75547801
KG
3307 while (insn)
3308 {
3309 insn = PREV_INSN (insn);
3310 if (insn == 0 || !NOTE_P (insn))
3311 break;
3312 }
23b2ce53 3313
dc01c3d1 3314 return insn;
23b2ce53
RS
3315}
3316
896aa4ea
DD
3317/* Return the previous insn before INSN that is not a NOTE, but stop
3318 the search before we enter another basic block. This routine does
3319 not look inside SEQUENCEs. */
3320
eb51c837 3321rtx_insn *
dc01c3d1 3322prev_nonnote_insn_bb (rtx uncast_insn)
896aa4ea 3323{
dc01c3d1
DM
3324 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3325
896aa4ea
DD
3326 while (insn)
3327 {
3328 insn = PREV_INSN (insn);
3329 if (insn == 0 || !NOTE_P (insn))
3330 break;
3331 if (NOTE_INSN_BASIC_BLOCK_P (insn))
eb51c837 3332 return NULL;
896aa4ea
DD
3333 }
3334
dc01c3d1 3335 return insn;
896aa4ea
DD
3336}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
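
/* Illustrative sketch (not part of the original source): passes that must
   generate identical code with and without -g typically step with the
   nonnote/nondebug variants so that notes and debug insns never affect
   their decisions, e.g.:

     for (rtx_insn *insn = get_insns ();
          insn != NULL;
          insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         examine (insn);   // "examine" is a placeholder for pass logic

   Using plain NEXT_INSN here would make the walk visit NOTEs and
   DEBUG_INSNs as well.  */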

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
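
/* Illustrative sketch (not part of the original source): a post-reload
   scan over "real work" insns, skipping notes and (after reload) bare
   USE/CLOBBER patterns:

     rtx_insn *insn = get_insns ();
     if (insn && !active_insn_p (insn))
       insn = next_active_insn (insn);
     for (; insn != NULL; insn = next_active_insn (insn))
       process (insn);   // "process" is a placeholder for pass logic

   Note that active_insn_p still accepts JUMP_TABLE_DATA (see the FIXME
   above), so such a walk must be prepared to see it.  */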
\f
#ifdef HAVE_cc0
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Return true if X contains an RTX_AUTOINC class rtx whose operand
   matches REG.  */

static int
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}
#endif

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx uncast_trial, int last)
{
  rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx call_insn = NULL_RTX;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
         ? (after ? PREV_INSN (after) : get_last_insn ())
         : NEXT_INSN (before);
}
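
/* Illustrative sketch (not part of the original source): a typical caller
   asks for the last insn of the replacement so it can keep scanning from
   there, falling back to the original insn when no split applies:

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       insn = last;   // the insn was split; resume after the new sequence

   This mirrors how split passes drive splitting; the surrounding loop is
   elided here.  */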
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
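
/* Illustrative note (not part of the original source): these _raw
   constructors only allocate and initialize; they do not link the new
   object into the chain.  The public emitters pair them with an add_insn
   variant, roughly:

     rtx_insn *insn = make_insn_raw (pattern);   // allocate, assign UID
     add_insn (insn);                            // splice onto the chain

   which is what emit_insn below does for its default case.  */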
\f
/* Add INSN into the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      if (get_last_insn () == after)
        set_last_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (after == stack->last)
              {
                stack->last = insn;
                break;
              }
        }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      if (get_insns () == before)
        set_first_insn (insn);
      else
        {
          struct sequence_stack *stack = seq_stack;
          /* Scan all pending sequences too.  */
          for (; stack; stack = stack->next)
            if (before == stack->first)
              {
                stack->first = insn;
                break;
              }

          gcc_assert (stack);
        }
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as the first insn in the BB is always
         either a NOTE or a LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as the first insn in the BB is always either a
         NOTE or a LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
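
/* Illustrative sketch (not part of the original source): moving an
   already-emitted insn is typically done by unlinking it and re-adding
   it, letting BLOCK_FOR_INSN be inferred from the new neighbor:

     remove_insn (insn);                   // unlink, keep DF info intact
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, anchor, NULL);  // relink after ANCHOR

   "anchor" stands for whatever insn the caller wants to place INSN after;
   see the comment on remove_insn below about nullifying the links.  */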

/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        SET_PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
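
/* Illustrative sketch (not part of the original source): expanders often
   checkpoint the stream, try one strategy, and roll back on failure:

     rtx_insn *last = get_last_insn ();
     emit_move_insn (target, source);
     if (!attempt_succeeded ())     // placeholder for the caller's test
       delete_insns_since (last);   // discard everything just emitted

   emit_move_insn is the usual entry point in expand code; the test stands
   for whatever condition makes the attempted expansion unusable.  */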

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_insn *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_insn *> (label);
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
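
/* Illustrative sketch (not part of the original source): after emitting an
   unconditional jump, callers typically terminate the flow with a barrier
   so later passes know control cannot fall through:

     rtx_insn *jump = emit_jump_insn_after (gen_jump (label), anchor);
     emit_barrier_after (jump);

   gen_jump is the target-independent jump generator; "anchor" and "label"
   stand for a caller-chosen insn and CODE_LABEL.  */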
\f
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
    /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
      return true;

    /* Notes for var tracking and EH region markers can appear between or
       inside basic blocks.  If the caller is emitting on the basic block
       boundary, do not set BLOCK_FOR_INSN on the new note.  */
    case NOTE_INSN_VAR_LOCATION:
    case NOTE_INSN_CALL_ARG_LOCATION:
    case NOTE_INSN_EH_REGION_BEG:
    case NOTE_INSN_EH_REGION_END:
      return on_bb_boundary_p;

    /* Otherwise, BLOCK_FOR_INSN must be set.  */
    default:
      return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx uncast_after)
{
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx uncast_before)
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
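
/* Illustrative sketch (not part of the original source): an EH region
   marker emitted at a block boundary deliberately stays outside the
   block, while one emitted mid-block is attached to it:

     emit_note_after (NOTE_INSN_EH_REGION_BEG, BB_END (bb));   // no block
     emit_note_after (NOTE_INSN_EH_REGION_BEG, mid_bb_insn);   // in block

   mid_bb_insn is a placeholder for some insn strictly inside BB; the
   difference is exactly what note_outside_basic_block_p decides.  */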
\f
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return safe_as_a <rtx_insn *> (last);

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return safe_as_a <rtx_insn *> (last);
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
                    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
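/* Illustrative sketch (not part of the original source): try_split above
   uses the _setloc variant so that replacement insns inherit the source
   location of the insn they replace:

     emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

   whereas plain emit_insn_after (pattern, after) derives the location
   from AFTER itself, skipping any debug insns in between.  */
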
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
                            rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
                                              insnp ? before : NULL_RTX,
                                              NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
                     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
0d682900 4883\f
2f937369
DM
4884/* Take X and emit it at the end of the doubly-linked
4885 INSN list.
23b2ce53
RS
4886
4887 Returns the last insn emitted. */
4888
cd459bf8 4889rtx_insn *
502b8322 4890emit_insn (rtx x)
23b2ce53 4891{
cd459bf8
DM
4892 rtx_insn *last = get_last_insn ();
4893 rtx_insn *insn;
23b2ce53 4894
2f937369
DM
4895 if (x == NULL_RTX)
4896 return last;
23b2ce53 4897
2f937369
DM
4898 switch (GET_CODE (x))
4899 {
b5b8b0ac 4900 case DEBUG_INSN:
2f937369
DM
4901 case INSN:
4902 case JUMP_INSN:
4903 case CALL_INSN:
4904 case CODE_LABEL:
4905 case BARRIER:
4906 case NOTE:
cd459bf8 4907 insn = as_a <rtx_insn *> (x);
2f937369 4908 while (insn)
23b2ce53 4909 {
cd459bf8 4910 rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_insn *
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (as_a <rtx_insn *> (label));
  return as_a <rtx_insn *> (label);
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
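
/* Usage sketch (illustrative only, not part of the original file): because
   gen_clobber builds its insns inside start_sequence/end_sequence, the
   result is a detached chain that the caller can splice wherever it is
   needed; "x" and "place" below are hypothetical:

       rtx_insn *seq = gen_clobber (x);
       emit_insn_before (seq, place);

   If X is a CONCAT, emit_clobber recurses so that each half gets its own
   CLOBBER insn rather than emitting a CONCAT into the stream.  */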

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
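
/* Illustrative example (not in the original source): for a pattern such as

       (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
		  (clobber (reg:CC 17))])

   set_for_reg_notes returns the inner SET, since the CLOBBER is not a
   second set; a PARALLEL containing two SETs would make it return
   NULL_RTX, because a REG_EQUAL or REG_EQUIV note would be ambiguous.  */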

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
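
/* Usage sketch (illustrative, not from the original file): an expander
   that lowers a division into a cheaper sequence can record the intended
   value so later passes may re-simplify it; "insn", "mode", "op0" and
   "op1" are hypothetical:

       set_unique_reg_note (insn, REG_EQUAL,
			    gen_rtx_DIV (mode, op0, op1));

   The call returns NULL_RTX without attaching anything if INSN's pattern
   is not a single SET of a register, or if DATUM has side effects.  */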

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
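
/* Illustrative classification examples (not in the original source):

       (set (reg:SI 60) (reg:SI 61))             -> INSN
       (set (pc) (label_ref 23))                 -> JUMP_INSN
       (set (reg:SI 60) (call (mem:QI f) (c)))   -> CALL_INSN
       (return)                                  -> JUMP_INSN

   A PARALLEL is scanned element by element, so a call bundled together
   with clobbers still classifies as CALL_INSN.  */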

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
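
/* Note added for exposition (not in the original source): emit places a
   BARRIER after an unconditional jump or a RETURN pattern, since control
   cannot fall through; in that case the BARRIER, not the jump itself, is
   what the function returns.  Callers that need the jump insn itself
   should use emit_jump_insn directly.  */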
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
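
/* Usage sketch (illustrative, not part of the original source): the
   canonical way to build a detached sequence.  get_insns must be called
   before end_sequence, while the nested chain is still current; "x" and
   "y" are hypothetical pseudos:

       rtx_insn *seq;
       start_sequence ();
       emit_move_insn (x, y);
       seq = get_insns ();
       end_sequence ();

   after which emit_insn (seq) splices the saved chain into the enclosing
   insn stream.  */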
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share
	 pseudo reg clobbers or clobbers of hard registers that originated
	 as pseudos.  This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

59ec66dc 5665
8e383849
JR
5666/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5667 on that assumption that INSN itself remains in its original place. */
5668
f8f0516e
DM
5669rtx_insn *
5670copy_delay_slot_insn (rtx_insn *insn)
8e383849
JR
5671{
5672 /* Copy INSN with its rtx_code, all its notes, location etc. */
f8f0516e 5673 insn = as_a <rtx_insn *> (copy_rtx (insn));
8e383849
JR
5674 INSN_UID (insn) = cur_insn_uid++;
5675 return insn;
5676}
5677
23b2ce53
RS
5678/* Initialize data structures and variables in this file
5679 before generating rtl for each function. */
5680
5681void
502b8322 5682init_emit (void)
23b2ce53 5683{
5936d944
JH
5684 set_first_insn (NULL);
5685 set_last_insn (NULL);
b5b8b0ac
AO
5686 if (MIN_NONDEBUG_INSN_UID)
5687 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5688 else
5689 cur_insn_uid = 1;
5690 cur_debug_insn_uid = 1;
23b2ce53 5691 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
23b2ce53 5692 first_label_num = label_num;
49ad7cfa 5693 seq_stack = NULL;
23b2ce53 5694
23b2ce53
RS
5695 /* Init the tables that describe all the pseudo regs. */
5696
3e029763 5697 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 5698
3e029763 5699 crtl->emit.regno_pointer_align
1b4572a8 5700 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
86fe05e0 5701
766090c2 5702 regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
0d4903b8 5703
e50126e8 5704 /* Put copies of all the hard registers into regno_reg_rtx. */
6cde4876 5705 memcpy (regno_reg_rtx,
5fb0e246 5706 initial_regno_reg_rtx,
6cde4876 5707 FIRST_PSEUDO_REGISTER * sizeof (rtx));
e50126e8 5708
23b2ce53 5709 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
bd60bab2 5710 init_virtual_regs ();
740ab4a2
RK
5711
5712 /* Indicate that the virtual registers and stack locations are
5713 all pointers. */
3502dc9c
JDA
5714 REG_POINTER (stack_pointer_rtx) = 1;
5715 REG_POINTER (frame_pointer_rtx) = 1;
5716 REG_POINTER (hard_frame_pointer_rtx) = 1;
5717 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 5718
3502dc9c
JDA
5719 REG_POINTER (virtual_incoming_args_rtx) = 1;
5720 REG_POINTER (virtual_stack_vars_rtx) = 1;
5721 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5722 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5723 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 5724
86fe05e0 5725#ifdef STACK_BOUNDARY
bdb429a5
RK
5726 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5727 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5728 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5729 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5730
5731 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5732 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5733 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5734 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5735 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
5736#endif
5737
5e82e7bd
JVA
5738#ifdef INIT_EXPANDERS
5739 INIT_EXPANDERS;
5740#endif
23b2ce53
RS
5741}
5742
a73b091d 5743/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5744
5745static rtx
a73b091d 5746gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5747{
5748 rtx tem;
5749 rtvec v;
5750 int units, i;
5751 enum machine_mode inner;
5752
5753 units = GET_MODE_NUNITS (mode);
5754 inner = GET_MODE_INNER (mode);
5755
15ed7b52
JG
5756 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5757
69ef87e2
AH
5758 v = rtvec_alloc (units);
5759
a73b091d
JW
5760 /* We need to call this function after we set the scalar const_tiny_rtx
5761 entries. */
5762 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5763
5764 for (i = 0; i < units; ++i)
a73b091d 5765 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5766
a06e3c40 5767 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5768 return tem;
5769}
5770
a06e3c40 5771/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
a73b091d 5772 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5773rtx
502b8322 5774gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5775{
a73b091d
JW
5776 enum machine_mode inner = GET_MODE_INNER (mode);
5777 int nunits = GET_MODE_NUNITS (mode);
5778 rtx x;
a06e3c40
R
5779 int i;
5780
a73b091d
JW
5781 /* Check to see if all of the elements have the same value. */
5782 x = RTVEC_ELT (v, nunits - 1);
5783 for (i = nunits - 2; i >= 0; i--)
5784 if (RTVEC_ELT (v, i) != x)
5785 break;
5786
5787 /* If the values are all the same, check to see if we can use one of the
5788 standard constant vectors. */
5789 if (i == -1)
5790 {
5791 if (x == CONST0_RTX (inner))
5792 return CONST0_RTX (mode);
5793 else if (x == CONST1_RTX (inner))
5794 return CONST1_RTX (mode);
e7c82a99
JJ
5795 else if (x == CONSTM1_RTX (inner))
5796 return CONSTM1_RTX (mode);
a73b091d
JW
5797 }
5798
5799 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5800}
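
/* Illustrative example (not in the original source): asking this entry
   point for a V4SImode vector whose four elements are all CONST0_RTX
   (SImode) returns the shared CONST0_RTX (V4SImode) instead of allocating
   a fresh CONST_VECTOR; only a mixed vector such as {0, 1, 0, 1} falls
   through to gen_rtx_raw_CONST_VECTOR.  */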

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
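
/* Illustrative note (not in the original source): on a typical port with
   BITS_PER_UNIT == 8 and BITS_PER_WORD == 64, the loop above selects
   byte_mode = QImode and word_mode = DImode, while ptr_mode becomes the
   integer mode whose width is POINTER_SIZE; on an ILP32 ABI for a 64-bit
   target, ptr_mode can therefore be SImode even though Pmode is wider.  */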

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
					 const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

/* Cached CLOBBER rtxs for hard registers, indexed by mode and register
   number.  */
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER of hard register REGNO in mode MODE, creating and
   caching it on first use.  */
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
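
/* Usage note (illustrative, not from the original source): the cache
   makes repeated requests for the same (mode, register) pair return one
   shared rtx, so for some target-specific hard register number REGNO,

       rtx c1 = gen_hard_reg_clobber (CCmode, REGNO);
       rtx c2 = gen_hard_reg_clobber (CCmode, REGNO);

   gives c1 == c2 by pointer equality, which is safe because such clobbers
   of hard registers are shareable (see copy_insn_1 above).  */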

/* Locations to attach to the prologue and epilogue insns.  */
location_t prologue_location;
location_t epilogue_location;

/* Hold the current and last location information, so the data structures
   are built lazily, only when instructions at a given place are needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}

/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
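
/* Usage sketch (illustrative, not in the original source): a target
   expanding an atomic store under MEMMODEL_RELEASE wants a fence before
   the store only, which this predicate expresses directly:

       if (need_atomic_barrier_p (model, true))
	 emit_pre_barrier ();
       emit_move_insn (mem, val);
       if (need_atomic_barrier_p (model, false))
	 emit_post_barrier ();

   MEMMODEL_SEQ_CST answers true for both calls; MEMMODEL_RELAXED for
   neither.  Here "mem", "val", "model" and the emit_*_barrier helpers
   are hypothetical.  */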
\f
#include "gt-emit-rtl.h"