]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
libgcov.c (__gcov_execl, [...]): Remove const qualifier from arg parameter.
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
2d593c86 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
b6f65e3c 4 Free Software Foundation, Inc.
23b2ce53 5
1322177d 6This file is part of GCC.
23b2ce53 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
23b2ce53 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
23b2ce53
RS
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
23b2ce53
RS
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
f822fcf7
KH
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
23b2ce53
RS
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
325217ed 53#include "fixed-value.h"
0dfa1860 54#include "bitmap.h"
a05924f9 55#include "basic-block.h"
87ff9c8e 56#include "ggc.h"
e1772ac0 57#include "debug.h"
d23c55c2 58#include "langhooks.h"
ef330312 59#include "tree-pass.h"
6fb5fa3c 60#include "df.h"
ca695ac9 61
1d445e9e
ILT
62/* Commonly used modes. */
63
0f41302f
MS
64enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
65enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 66enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 67enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 68
bd60bab2
JH
69/* Datastructures maintained for currently processed function in RTL form. */
70
3e029763 71struct rtl_data x_rtl;
bd60bab2
JH
72
73/* Indexed by pseudo register number, gives the rtx for that pseudo.
74 Allocated in parallel with regno_pointer_align.
75 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
76 with length attribute nested in top level structures. */
77
78rtx * regno_reg_rtx;
23b2ce53
RS
79
80/* This is *not* reset after each function. It gives each CODE_LABEL
81 in the entire compilation a unique label number. */
82
044b4de3 83static GTY(()) int label_num = 1;
23b2ce53 84
23b2ce53
RS
85/* Nonzero means do not generate NOTEs for source line numbers. */
86
87static int no_line_numbers;
88
89/* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
5692c7bc
ZW
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
23b2ce53 93
5da077de 94rtx global_rtl[GR_MAX];
23b2ce53 95
6cde4876
JL
96/* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
23b2ce53
RS
102/* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
68d75312
JC
108rtx const_true_rtx;
109
23b2ce53
RS
110REAL_VALUE_TYPE dconst0;
111REAL_VALUE_TYPE dconst1;
112REAL_VALUE_TYPE dconst2;
113REAL_VALUE_TYPE dconstm1;
03f2ea93 114REAL_VALUE_TYPE dconsthalf;
23b2ce53 115
325217ed
CF
116/* Record fixed-point constant 0 and 1. */
117FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
118FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
119
23b2ce53
RS
120/* All references to the following fixed hard registers go through
121 these unique rtl objects. On machines where the frame-pointer and
122 arg-pointer are the same register, they use the same unique object.
123
124 After register allocation, other rtl objects which used to be pseudo-regs
125 may be clobbered to refer to the frame-pointer register.
126 But references that were originally to the frame-pointer can be
127 distinguished from the others because they contain frame_pointer_rtx.
128
ac6f08b0
DE
129 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
130 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 131 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
132 register elimination hard_frame_pointer_rtx should always be used.
133 On machines where the two registers are same (most) then these are the
134 same.
135
23b2ce53
RS
136 In an inline procedure, the stack and frame pointer rtxs may not be
137 used for anything else. */
23b2ce53
RS
138rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
139rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
140rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
141
a4417a86
JW
142/* This is used to implement __builtin_return_address for some machines.
143 See for instance the MIPS port. */
144rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
145
23b2ce53
RS
146/* We make one copy of (const_int C) where C is in
147 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
148 to save space during the compilation and simplify comparisons of
149 integers. */
150
5da077de 151rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 152
c13e8210
MM
153/* A hash table storing CONST_INTs whose absolute value is greater
154 than MAX_SAVED_CONST_INT. */
155
e2500fed
GK
156static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
157 htab_t const_int_htab;
c13e8210 158
173b24b9 159/* A hash table storing memory attribute structures. */
e2500fed
GK
160static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
161 htab_t mem_attrs_htab;
173b24b9 162
a560d4d4
JH
163/* A hash table storing register attribute structures. */
164static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
165 htab_t reg_attrs_htab;
166
5692c7bc 167/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
168static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_double_htab;
5692c7bc 170
091a3ac7
CF
171/* A hash table storing all CONST_FIXEDs. */
172static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
173 htab_t const_fixed_htab;
174
3e029763
JH
175#define first_insn (crtl->emit.x_first_insn)
176#define last_insn (crtl->emit.x_last_insn)
177#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178#define last_location (crtl->emit.x_last_location)
179#define first_label_num (crtl->emit.x_first_label_num)
23b2ce53 180
502b8322 181static rtx make_call_insn_raw (rtx);
502b8322 182static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
5eb2a9f2 183static void set_used_decls (tree);
502b8322
AJ
184static void mark_label_nuses (rtx);
185static hashval_t const_int_htab_hash (const void *);
186static int const_int_htab_eq (const void *, const void *);
187static hashval_t const_double_htab_hash (const void *);
188static int const_double_htab_eq (const void *, const void *);
189static rtx lookup_const_double (rtx);
091a3ac7
CF
190static hashval_t const_fixed_htab_hash (const void *);
191static int const_fixed_htab_eq (const void *, const void *);
192static rtx lookup_const_fixed (rtx);
502b8322
AJ
193static hashval_t mem_attrs_htab_hash (const void *);
194static int mem_attrs_htab_eq (const void *, const void *);
4862826d 195static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
502b8322
AJ
196 enum machine_mode);
197static hashval_t reg_attrs_htab_hash (const void *);
198static int reg_attrs_htab_eq (const void *, const void *);
199static reg_attrs *get_reg_attrs (tree, int);
200static tree component_ref_for_mem_expr (tree);
a73b091d 201static rtx gen_const_vector (enum machine_mode, int);
32b32b16 202static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 203
6b24c259
JH
204/* Probability of the conditional branch currently proceeded by try_split.
205 Set to -1 otherwise. */
206int split_branch_probability = -1;
ca695ac9 207\f
c13e8210
MM
208/* Returns a hash code for X (which is a really a CONST_INT). */
209
210static hashval_t
502b8322 211const_int_htab_hash (const void *x)
c13e8210 212{
f7d504c2 213 return (hashval_t) INTVAL ((const_rtx) x);
c13e8210
MM
214}
215
cc2902df 216/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
217 CONST_INT) is the same as that given by Y (which is really a
218 HOST_WIDE_INT *). */
219
220static int
502b8322 221const_int_htab_eq (const void *x, const void *y)
c13e8210 222{
f7d504c2 223 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
5692c7bc
ZW
224}
225
226/* Returns a hash code for X (which is really a CONST_DOUBLE). */
227static hashval_t
502b8322 228const_double_htab_hash (const void *x)
5692c7bc 229{
f7d504c2 230 const_rtx const value = (const_rtx) x;
46b33600 231 hashval_t h;
5692c7bc 232
46b33600
RH
233 if (GET_MODE (value) == VOIDmode)
234 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
235 else
fe352c29 236 {
15c812e3 237 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
238 /* MODE is used in the comparison, so it should be in the hash. */
239 h ^= GET_MODE (value);
240 }
5692c7bc
ZW
241 return h;
242}
243
cc2902df 244/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
245 is the same as that represented by Y (really a ...) */
246static int
502b8322 247const_double_htab_eq (const void *x, const void *y)
5692c7bc 248{
f7d504c2 249 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
5692c7bc
ZW
250
251 if (GET_MODE (a) != GET_MODE (b))
252 return 0;
8580f7a0
RH
253 if (GET_MODE (a) == VOIDmode)
254 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
255 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
256 else
257 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
258 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
259}
260
091a3ac7
CF
261/* Returns a hash code for X (which is really a CONST_FIXED). */
262
263static hashval_t
264const_fixed_htab_hash (const void *x)
265{
3101faab 266 const_rtx const value = (const_rtx) x;
091a3ac7
CF
267 hashval_t h;
268
269 h = fixed_hash (CONST_FIXED_VALUE (value));
270 /* MODE is used in the comparison, so it should be in the hash. */
271 h ^= GET_MODE (value);
272 return h;
273}
274
275/* Returns nonzero if the value represented by X (really a ...)
276 is the same as that represented by Y (really a ...). */
277
278static int
279const_fixed_htab_eq (const void *x, const void *y)
280{
3101faab 281 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
091a3ac7
CF
282
283 if (GET_MODE (a) != GET_MODE (b))
284 return 0;
285 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
286}
287
173b24b9
RK
288/* Returns a hash code for X (which is a really a mem_attrs *). */
289
290static hashval_t
502b8322 291mem_attrs_htab_hash (const void *x)
173b24b9 292{
f7d504c2 293 const mem_attrs *const p = (const mem_attrs *) x;
173b24b9
RK
294
295 return (p->alias ^ (p->align * 1000)
296 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
297 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
78b76d08 298 ^ (size_t) iterative_hash_expr (p->expr, 0));
173b24b9
RK
299}
300
cc2902df 301/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
302 mem_attrs *) is the same as that given by Y (which is also really a
303 mem_attrs *). */
c13e8210
MM
304
305static int
502b8322 306mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 307{
741ac903
KG
308 const mem_attrs *const p = (const mem_attrs *) x;
309 const mem_attrs *const q = (const mem_attrs *) y;
173b24b9 310
78b76d08
SB
311 return (p->alias == q->alias && p->offset == q->offset
312 && p->size == q->size && p->align == q->align
313 && (p->expr == q->expr
314 || (p->expr != NULL_TREE && q->expr != NULL_TREE
315 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
316}
317
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  Returns a shared, GC-allocated mem_attrs, or zero
   when every attribute equals the default for MODE (the MEM_* accessor
   macros must report the same defaults for an absent attribute block).  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  /* Build the candidate on the stack; it is only copied to GC memory
     when no identical entry already exists.  */
  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
354
a560d4d4
JH
355/* Returns a hash code for X (which is a really a reg_attrs *). */
356
357static hashval_t
502b8322 358reg_attrs_htab_hash (const void *x)
a560d4d4 359{
741ac903 360 const reg_attrs *const p = (const reg_attrs *) x;
a560d4d4
JH
361
362 return ((p->offset * 1000) ^ (long) p->decl);
363}
364
6356f892 365/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
366 reg_attrs *) is the same as that given by Y (which is also really a
367 reg_attrs *). */
368
369static int
502b8322 370reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4 371{
741ac903
KG
372 const reg_attrs *const p = (const reg_attrs *) x;
373 const reg_attrs *const q = (const reg_attrs *) y;
a560d4d4
JH
374
375 return (p->decl == q->decl && p->offset == q->offset);
376}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  The attributes record
   DECL, the declaration (if any) a REG corresponds to, and OFFSET, the
   byte offset into that declaration.  Returns zero when both are the
   default (no decl, zero offset).  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  /* Share one GC-allocated structure among all equal attribute sets;
     allocate only when the lookup misses.  */
  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
403
6fb5fa3c
DB
404
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  Only defined when the target provides no "blockage"
   insn pattern of its own.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  /* Volatility is what stops the scheduler from moving insns past it.  */
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
417
418
08394eef
BS
419/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
420 don't attempt to share with the various global pieces of rtl (such as
421 frame_pointer_rtx). */
422
423rtx
502b8322 424gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
425{
426 rtx x = gen_rtx_raw_REG (mode, regno);
427 ORIGINAL_REGNO (x) = regno;
428 return x;
429}
430
c5c76735
JL
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

/* Return the unique, shared CONST_INT rtx for value ARG.  MODE is
   ignored (CONST_INTs are modeless).  Small values come from the
   pre-allocated const_int_rtx array; larger ones are interned in
   const_int_htab so pointer equality implies value equality.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  /* Small values are pre-allocated once per compilation.  */
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  /* Reuse const_true_rtx for the target's store-flag value when it is
     outside the saved range.  */
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
456
2496c7bd 457rtx
502b8322 458gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
459{
460 return GEN_INT (trunc_int_for_mode (c, mode));
461}
462
5692c7bc
ZW
463/* CONST_DOUBLEs might be created from pairs of integers, or from
464 REAL_VALUE_TYPEs. Also, their length is known only at run time,
465 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
466
467/* Determine whether REAL, a CONST_DOUBLE, already exists in the
468 hash table. If so, return its counterpart; otherwise add it
469 to the hash table and return it. */
470static rtx
502b8322 471lookup_const_double (rtx real)
5692c7bc
ZW
472{
473 void **slot = htab_find_slot (const_double_htab, real, INSERT);
474 if (*slot == 0)
475 *slot = real;
476
477 return (rtx) *slot;
478}
29105cea 479
5692c7bc
ZW
480/* Return a CONST_DOUBLE rtx for a floating-point value specified by
481 VALUE in mode MODE. */
0133b7d9 482rtx
502b8322 483const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 484{
5692c7bc
ZW
485 rtx real = rtx_alloc (CONST_DOUBLE);
486 PUT_MODE (real, mode);
487
9e254451 488 real->u.rv = value;
5692c7bc
ZW
489
490 return lookup_const_double (real);
491}
492
091a3ac7
CF
493/* Determine whether FIXED, a CONST_FIXED, already exists in the
494 hash table. If so, return its counterpart; otherwise add it
495 to the hash table and return it. */
496
497static rtx
498lookup_const_fixed (rtx fixed)
499{
500 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
501 if (*slot == 0)
502 *slot = fixed;
503
504 return (rtx) *slot;
505}
506
507/* Return a CONST_FIXED rtx for a fixed-point value specified by
508 VALUE in mode MODE. */
509
510rtx
511const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
512{
513 rtx fixed = rtx_alloc (CONST_FIXED);
514 PUT_MODE (fixed, mode);
515
516 fixed->u.fv = value;
517
518 return lookup_const_fixed (fixed);
519}
520
5692c7bc
ZW
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  /* Zero any remaining words of the CONST_DOUBLE beyond the two we
     filled in, so hashing and comparison see a fully defined object.  */
  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  /* Intern it so equal values share one rtx.  */
  return lookup_const_double (value);
}
572
/* Return a REG rtx for register number REGNO in mode MODE, reusing the
   unique pre-allocated rtx (frame_pointer_rtx etc.) for well-known
   pointer registers when that is safe.  */

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      /* Only share the PIC register when it is fixed; otherwise it is
	 an ordinary allocatable register.  */
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  /* Fall back to a fresh, unshared REG.  */
  return gen_raw_REG (mode, regno);
}
637
41472af8 638rtx
502b8322 639gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
640{
641 rtx rt = gen_rtx_raw_MEM (mode, addr);
642
643 /* This field is not cleared by the mere allocation of the rtx, so
644 we clear it here. */
173b24b9 645 MEM_ATTRS (rt) = 0;
41472af8
MM
646
647 return rt;
648}
ddef6bc7 649
542a8afa
RH
650/* Generate a memory referring to non-trapping constant memory. */
651
652rtx
653gen_const_mem (enum machine_mode mode, rtx addr)
654{
655 rtx mem = gen_rtx_MEM (mode, addr);
656 MEM_READONLY_P (mem) = 1;
657 MEM_NOTRAP_P (mem) = 1;
658 return mem;
659}
660
bf877a76
R
661/* Generate a MEM referring to fixed portions of the frame, e.g., register
662 save areas. */
663
664rtx
665gen_frame_mem (enum machine_mode mode, rtx addr)
666{
667 rtx mem = gen_rtx_MEM (mode, addr);
668 MEM_NOTRAP_P (mem) = 1;
669 set_mem_alias_set (mem, get_frame_alias_set ());
670 return mem;
671}
672
673/* Generate a MEM referring to a temporary use of the stack, not part
674 of the fixed stack frame. For example, something which is pushed
675 by a target splitter. */
676rtx
677gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
678{
679 rtx mem = gen_rtx_MEM (mode, addr);
680 MEM_NOTRAP_P (mem) = 1;
e3b5732b 681 if (!cfun->calls_alloca)
bf877a76
R
682 set_mem_alias_set (mem, get_frame_alias_set ());
683 return mem;
684}
685
beb72684
RH
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  REG, when
   non-null, is the inner object; only its hard-register status is
   consulted.  The rule chain below is order-sensitive: earlier
   exemptions deliberately bypass later size restrictions.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      /* Component subregs of complex/vector are exempt from the
	 target's mode-change restrictions.  */
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
773
774rtx
775gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
776{
777 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 778 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
779}
780
173b24b9
RK
781/* Generate a SUBREG representing the least-significant part of REG if MODE
782 is smaller than mode of REG, otherwise paradoxical SUBREG. */
783
ddef6bc7 784rtx
502b8322 785gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
786{
787 enum machine_mode inmode;
ddef6bc7
JJ
788
789 inmode = GET_MODE (reg);
790 if (inmode == VOIDmode)
791 inmode = mode;
e0e08ac2
JH
792 return gen_rtx_SUBREG (mode, reg,
793 subreg_lowpart_offset (mode, inmode));
ddef6bc7 794}
c5c76735 795\f
23b2ce53 796
80379f51
PB
797/* Create an rtvec and stores within it the RTXen passed in the arguments. */
798
23b2ce53 799rtvec
e34d07f2 800gen_rtvec (int n, ...)
23b2ce53 801{
80379f51
PB
802 int i;
803 rtvec rt_val;
e34d07f2 804 va_list p;
23b2ce53 805
e34d07f2 806 va_start (p, n);
23b2ce53 807
80379f51 808 /* Don't allocate an empty rtvec... */
23b2ce53 809 if (n == 0)
80379f51 810 return NULL_RTVEC;
23b2ce53 811
80379f51 812 rt_val = rtvec_alloc (n);
4f90e4a0 813
23b2ce53 814 for (i = 0; i < n; i++)
80379f51 815 rt_val->elem[i] = va_arg (p, rtx);
6268b922 816
e34d07f2 817 va_end (p);
80379f51 818 return rt_val;
23b2ce53
RS
819}
820
821rtvec
502b8322 822gen_rtvec_v (int n, rtx *argp)
23b2ce53 823{
b3694847
SS
824 int i;
825 rtvec rt_val;
23b2ce53 826
80379f51 827 /* Don't allocate an empty rtvec... */
23b2ce53 828 if (n == 0)
80379f51 829 return NULL_RTVEC;
23b2ce53 830
80379f51 831 rt_val = rtvec_alloc (n);
23b2ce53
RS
832
833 for (i = 0; i < n; i++)
8f985ec4 834 rt_val->elem[i] = *argp++;
23b2ce53
RS
835
836 return rt_val;
837}
838\f
38ae7651
RS
839/* Return the number of bytes between the start of an OUTER_MODE
840 in-memory value and the start of an INNER_MODE in-memory value,
841 given that the former is a lowpart of the latter. It may be a
842 paradoxical lowpart, in which case the offset will be negative
843 on big-endian targets. */
844
845int
846byte_lowpart_offset (enum machine_mode outer_mode,
847 enum machine_mode inner_mode)
848{
849 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
850 return subreg_lowpart_offset (outer_mode, inner_mode);
851 else
852 return -subreg_lowpart_offset (inner_mode, outer_mode);
853}
854\f
23b2ce53
RS
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  /* Pseudos may only be created before reload; afterwards every
     register number must map to a hard register.  */
  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    crtl->stack_alignment_estimated = align;

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      /* Grow both side tables by doubling, keeping reallocation cost
	 amortized constant per new pseudo.  The new halves are
	 zero-filled.  */
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
915
38ae7651
RS
916/* Update NEW with the same attributes as REG, but with OFFSET added
917 to the REG_OFFSET. */
a560d4d4 918
e53a16e7 919static void
60564289 920update_reg_offset (rtx new_rtx, rtx reg, int offset)
a560d4d4 921{
60564289 922 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
502b8322 923 REG_OFFSET (reg) + offset);
e53a16e7
ILT
924}
925
38ae7651
RS
926/* Generate a register with same attributes as REG, but with OFFSET
927 added to the REG_OFFSET. */
e53a16e7
ILT
928
929rtx
930gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
931 int offset)
932{
60564289 933 rtx new_rtx = gen_rtx_REG (mode, regno);
e53a16e7 934
60564289
KG
935 update_reg_offset (new_rtx, reg, offset);
936 return new_rtx;
e53a16e7
ILT
937}
938
939/* Generate a new pseudo-register with the same attributes as REG, but
38ae7651 940 with OFFSET added to the REG_OFFSET. */
e53a16e7
ILT
941
942rtx
943gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
944{
60564289 945 rtx new_rtx = gen_reg_rtx (mode);
e53a16e7 946
60564289
KG
947 update_reg_offset (new_rtx, reg, offset);
948 return new_rtx;
a560d4d4
JH
949}
950
38ae7651
RS
951/* Adjust REG in-place so that it has mode MODE. It is assumed that the
952 new register is a (possibly paradoxical) lowpart of the old one. */
a560d4d4
JH
953
954void
38ae7651 955adjust_reg_mode (rtx reg, enum machine_mode mode)
a560d4d4 956{
38ae7651
RS
957 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
958 PUT_MODE (reg, mode);
959}
960
961/* Copy REG's attributes from X, if X has any attributes. If REG and X
962 have different modes, REG is a (possibly paradoxical) lowpart of X. */
963
964void
965set_reg_attrs_from_value (rtx reg, rtx x)
966{
967 int offset;
968
923ba36f
JJ
969 /* Hard registers can be reused for multiple purposes within the same
970 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
971 on them is wrong. */
972 if (HARD_REGISTER_P (reg))
973 return;
974
38ae7651 975 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
46b71b03
PB
976 if (MEM_P (x))
977 {
978 if (MEM_OFFSET (x) && GET_CODE (MEM_OFFSET (x)) == CONST_INT)
979 REG_ATTRS (reg)
980 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
981 if (MEM_POINTER (x))
0a317111 982 mark_reg_pointer (reg, 0);
46b71b03
PB
983 }
984 else if (REG_P (x))
985 {
986 if (REG_ATTRS (x))
987 update_reg_offset (reg, x, offset);
988 if (REG_POINTER (x))
989 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
990 }
991}
992
993/* Generate a REG rtx for a new pseudo register, copying the mode
994 and attributes from X. */
995
996rtx
997gen_reg_rtx_and_attrs (rtx x)
998{
999 rtx reg = gen_reg_rtx (GET_MODE (x));
1000 set_reg_attrs_from_value (reg, x);
1001 return reg;
a560d4d4
JH
1002}
1003
9d18e06b
JZ
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  PARM_RTX is either
   a single REG or a PARALLEL describing a multi-register parameter.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  /* Each vector element is an EXPR_LIST pairing a register with
	     its byte offset within the parameter.  */
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
1027
38ae7651
RS
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  X may be a REG, a lowpart SUBREG of a REG, a CONCAT of two
   registers (complex values), or a PARALLEL of registers.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      /* Only lowpart subregs are expected here; attribute the inner
	 register directly.  */
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      /* Real part starts at offset 0; imaginary part follows at one
	 unit of the part mode.  */
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  /* Each element pairs a register with its byte offset.  */
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
1070
38ae7651
RS
1071/* Assign the RTX X to declaration T. */
1072
1073void
1074set_decl_rtl (tree t, rtx x)
1075{
1076 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1077 if (x)
1078 set_reg_attrs_for_decl_rtl (t, x);
1079}
1080
5141868d
RS
1081/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1082 if the ABI requires the parameter to be passed by reference. */
38ae7651
RS
1083
1084void
5141868d 1085set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
38ae7651
RS
1086{
1087 DECL_INCOMING_RTL (t) = x;
5141868d 1088 if (x && !by_reference_p)
38ae7651
RS
1089 set_reg_attrs_for_decl_rtl (t, x);
1090}
1091
754fdcca
RK
1092/* Identify REG (which may be a CONCAT) as a user register. */
1093
1094void
502b8322 1095mark_user_reg (rtx reg)
754fdcca
RK
1096{
1097 if (GET_CODE (reg) == CONCAT)
1098 {
1099 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1100 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1101 }
754fdcca 1102 else
5b0264cb
NS
1103 {
1104 gcc_assert (REG_P (reg));
1105 REG_USERVAR_P (reg) = 1;
1106 }
754fdcca
RK
1107}
1108
86fe05e0
RK
1109/* Identify REG as a probable pointer register and show its alignment
1110 as ALIGN, if nonzero. */
23b2ce53
RS
1111
1112void
502b8322 1113mark_reg_pointer (rtx reg, int align)
23b2ce53 1114{
3502dc9c 1115 if (! REG_POINTER (reg))
00995e78 1116 {
3502dc9c 1117 REG_POINTER (reg) = 1;
86fe05e0 1118
00995e78
RE
1119 if (align)
1120 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1121 }
1122 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 1123 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 1124 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
1125}
1126
/* Return 1 plus largest pseudo reg number used in the current function.
   reg_rtx_no is the next number gen_reg_rtx would hand out.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}
1134
/* Return 1 + the largest label number used so far in the current
   function.  */

int
max_label_num (void)
{
  return label_num;
}
1142
/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
6de9cd9a
DN
1150
1151/* If the rtx for label was created during the expansion of a nested
1152 function, then first_label_num won't include this label number.
fa10beec 1153 Fix this now so that array indices work later. */
6de9cd9a
DN
1154
1155void
1156maybe_set_first_label_num (rtx x)
1157{
1158 if (CODE_LABEL_NUMBER (x) < first_label_num)
1159 first_label_num = CODE_LABEL_NUMBER (x);
1160}
23b2ce53
RS
1161\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    /* A VOIDmode constant here is a CONST_DOUBLE holding a two-word
       integer value.  */
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	/* The requested piece is wider than the extension's operand but
	   narrower than its result: emit a narrower extension of the
	   same kind.  */
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
1235\f
/* Return the high part of X in MODE, i.e. the words not covered by
   gen_lowpart.  Aborts (via gcc_assert) rather than returning 0 when
   the subreg cannot be formed.  */

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
5222e470 1262
26d249eb 1263/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1264 be VOIDmode constant. */
1265rtx
502b8322 1266gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1267{
1268 if (GET_MODE (exp) != VOIDmode)
1269 {
5b0264cb 1270 gcc_assert (GET_MODE (exp) == innermode);
5222e470
JH
1271 return gen_highpart (outermode, exp);
1272 }
1273 return simplify_gen_subreg (outermode, exp, innermode,
1274 subreg_highpart_offset (outermode, innermode));
1275}
68252e27 1276
38ae7651 1277/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
8698cce3 1278
e0e08ac2 1279unsigned int
502b8322 1280subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1281{
1282 unsigned int offset = 0;
1283 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1284
e0e08ac2 1285 if (difference > 0)
ccba022b 1286 {
e0e08ac2
JH
1287 if (WORDS_BIG_ENDIAN)
1288 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1289 if (BYTES_BIG_ENDIAN)
1290 offset += difference % UNITS_PER_WORD;
ccba022b 1291 }
ddef6bc7 1292
e0e08ac2 1293 return offset;
ccba022b 1294}
eea50aa0 1295
e0e08ac2
JH
1296/* Return offset in bytes to get OUTERMODE high part
1297 of the value in mode INNERMODE stored in memory in target format. */
1298unsigned int
502b8322 1299subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1300{
1301 unsigned int offset = 0;
1302 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1303
5b0264cb 1304 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
e0e08ac2 1305
eea50aa0
JH
1306 if (difference > 0)
1307 {
e0e08ac2 1308 if (! WORDS_BIG_ENDIAN)
eea50aa0 1309 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1310 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1311 offset += difference % UNITS_PER_WORD;
1312 }
1313
e0e08ac2 1314 return offset;
eea50aa0 1315}
ccba022b 1316
23b2ce53
RS
1317/* Return 1 iff X, assumed to be a SUBREG,
1318 refers to the least significant part of its containing reg.
1319 If X is not a SUBREG, always return 1 (it is its own low part!). */
1320
1321int
fa233e34 1322subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1323{
1324 if (GET_CODE (x) != SUBREG)
1325 return 1;
a3a03040
RK
1326 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1327 return 0;
23b2ce53 1328
e0e08ac2
JH
1329 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1330 == SUBREG_BYTE (x));
23b2ce53
RS
1331}
1332\f
ddef6bc7
JJ
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  /* After reload a strictly valid address is required; when the
	     check passes we deliberately fall through to the
	     simplify_gen_subreg call below.  */
	  if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
1396
535a42b1
NS
1397/* Similar to `operand_subword', but never return 0. If we can't
1398 extract the required subword, put OP into a register and try again.
1399 The second attempt must succeed. We always validate the address in
1400 this case.
23b2ce53
RS
1401
1402 MODE is the mode of OP, in case it is CONST_INT. */
1403
1404rtx
502b8322 1405operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1406{
ddef6bc7 1407 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1408
1409 if (result)
1410 return result;
1411
1412 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1413 {
1414 /* If this is a register which can not be accessed by words, copy it
1415 to a pseudo register. */
f8cfc6aa 1416 if (REG_P (op))
77e6b0eb
JC
1417 op = copy_to_reg (op);
1418 else
1419 op = force_reg (mode, op);
1420 }
23b2ce53 1421
ddef6bc7 1422 result = operand_subword (op, offset, 1, mode);
5b0264cb 1423 gcc_assert (result);
23b2ce53
RS
1424
1425 return result;
1426}
1427\f
998d7deb
RH
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the later with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    /* Recurse so nested component refs are canonicalized bottom-up.  */
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (inner)
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR)
	inner = TREE_OPERAND (inner, 0);

      /* Anything that is not a decl becomes the NULL "variable base"
	 marker described above.  */
      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    /* Rebuild the COMPONENT_REF with the canonicalized base.  */
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
		   TREE_OPERAND (ref, 1), NULL_TREE);
}
173b24b9 1458
2b3493c8
AK
/* Returns 1 if both MEM_EXPRs EXPR1 and EXPR2 can be considered equal
   and 0 otherwise.  Either argument may be NULL_TREE.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  /* Identity was tested above, so one NULL operand means inequality.  */
  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
			   TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			     TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}
1492
805903b5
JJ
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      /* Walk outward through nested COMPONENT_REFs, accumulating the
	 byte offset of each field, until a sufficiently aligned
	 containing object (or field context) is found.  */
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      /* NULL base: alignment comes from the field's containing
		 record type.  */
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
1576
6926c713 1577/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1578 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1579 if we are making a new object of this type. BITPOS is nonzero if
1580 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1581
1582void
502b8322
AJ
1583set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1584 HOST_WIDE_INT bitpos)
173b24b9 1585{
4862826d 1586 alias_set_type alias = MEM_ALIAS_SET (ref);
998d7deb 1587 tree expr = MEM_EXPR (ref);
8ac61af7
RK
1588 rtx offset = MEM_OFFSET (ref);
1589 rtx size = MEM_SIZE (ref);
1590 unsigned int align = MEM_ALIGN (ref);
6f1087be 1591 HOST_WIDE_INT apply_bitpos = 0;
173b24b9
RK
1592 tree type;
1593
1594 /* It can happen that type_for_mode was given a mode for which there
1595 is no language-level type. In which case it returns NULL, which
1596 we can see here. */
1597 if (t == NULL_TREE)
1598 return;
1599
1600 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1601 if (type == error_mark_node)
1602 return;
173b24b9 1603
173b24b9
RK
1604 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1605 wrong answer, as it assumes that DECL_RTL already has the right alias
1606 info. Callers should not set DECL_RTL until after the call to
1607 set_mem_attributes. */
5b0264cb 1608 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1609
738cc472 1610 /* Get the alias set from the expression or type (perhaps using a
8ac61af7
RK
1611 front-end routine) and use it. */
1612 alias = get_alias_set (t);
173b24b9 1613
a5e9c810 1614 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
07cb6e8c
JM
1615 MEM_IN_STRUCT_P (ref)
1616 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
f8ad8d7c 1617 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1618
8ac61af7
RK
1619 /* If we are making an object of this type, or if this is a DECL, we know
1620 that it is a scalar if the type is not an aggregate. */
07cb6e8c
JM
1621 if ((objectp || DECL_P (t))
1622 && ! AGGREGATE_TYPE_P (type)
1623 && TREE_CODE (type) != COMPLEX_TYPE)
173b24b9
RK
1624 MEM_SCALAR_P (ref) = 1;
1625
c3d32120
RK
1626 /* We can set the alignment from the type if we are making an object,
1627 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
7ccf35ed
DN
1628 if (objectp || TREE_CODE (t) == INDIRECT_REF
1629 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1630 || TYPE_ALIGN_OK (type))
c3d32120 1631 align = MAX (align, TYPE_ALIGN (type));
7ccf35ed
DN
1632 else
1633 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1634 {
1635 if (integer_zerop (TREE_OPERAND (t, 1)))
1636 /* We don't know anything about the alignment. */
1637 align = BITS_PER_UNIT;
1638 else
1639 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1640 }
40c0668b 1641
738cc472
RK
1642 /* If the size is known, we can set that. */
1643 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
8ac61af7 1644 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
738cc472 1645
80965c18
RK
1646 /* If T is not a type, we may be able to deduce some more information about
1647 the expression. */
1648 if (! TYPE_P (t))
8ac61af7 1649 {
8476af98 1650 tree base;
df96b059 1651 bool align_computed = false;
389fdba0 1652
8ac61af7
RK
1653 if (TREE_THIS_VOLATILE (t))
1654 MEM_VOLATILE_P (ref) = 1;
173b24b9 1655
c56e3582
RK
1656 /* Now remove any conversions: they don't change what the underlying
1657 object is. Likewise for SAVE_EXPR. */
1043771b 1658 while (CONVERT_EXPR_P (t)
c56e3582
RK
1659 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1660 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1661 t = TREE_OPERAND (t, 0);
1662
8476af98
RH
1663 /* We may look through structure-like accesses for the purposes of
1664 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1665 base = t;
1666 while (TREE_CODE (base) == COMPONENT_REF
1667 || TREE_CODE (base) == REALPART_EXPR
1668 || TREE_CODE (base) == IMAGPART_EXPR
1669 || TREE_CODE (base) == BIT_FIELD_REF)
1670 base = TREE_OPERAND (base, 0);
1671
1672 if (DECL_P (base))
1673 {
1674 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1675 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1676 else
1677 MEM_NOTRAP_P (ref) = 1;
1678 }
1679 else
1680 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1681
1682 base = get_base_address (base);
1683 if (base && DECL_P (base)
1684 && TREE_READONLY (base)
1685 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1686 {
1687 tree base_type = TREE_TYPE (base);
1688 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1689 || DECL_ARTIFICIAL (base));
1690 MEM_READONLY_P (ref) = 1;
1691 }
1692
2039d7aa
RH
1693 /* If this expression uses it's parent's alias set, mark it such
1694 that we won't change it. */
1695 if (component_uses_parent_alias_set (t))
10b76d73
RK
1696 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1697
8ac61af7
RK
1698 /* If this is a decl, set the attributes of the MEM from it. */
1699 if (DECL_P (t))
1700 {
998d7deb
RH
1701 expr = t;
1702 offset = const0_rtx;
6f1087be 1703 apply_bitpos = bitpos;
8ac61af7
RK
1704 size = (DECL_SIZE_UNIT (t)
1705 && host_integerp (DECL_SIZE_UNIT (t), 1)
1706 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
68252e27 1707 align = DECL_ALIGN (t);
df96b059 1708 align_computed = true;
8ac61af7
RK
1709 }
1710
40c0668b 1711 /* If this is a constant, we know the alignment. */
6615c446 1712 else if (CONSTANT_CLASS_P (t))
9ddfb1a7
RK
1713 {
1714 align = TYPE_ALIGN (type);
1715#ifdef CONSTANT_ALIGNMENT
1716 align = CONSTANT_ALIGNMENT (t, align);
1717#endif
df96b059 1718 align_computed = true;
9ddfb1a7 1719 }
998d7deb
RH
1720
1721 /* If this is a field reference and not a bit-field, record it. */
fa10beec 1722 /* ??? There is some information that can be gleaned from bit-fields,
998d7deb
RH
1723 such as the word offset in the structure that might be modified.
1724 But skip it for now. */
1725 else if (TREE_CODE (t) == COMPONENT_REF
1726 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1727 {
1728 expr = component_ref_for_mem_expr (t);
1729 offset = const0_rtx;
6f1087be 1730 apply_bitpos = bitpos;
998d7deb
RH
1731 /* ??? Any reason the field size would be different than
1732 the size we got from the type? */
1733 }
1734
1735 /* If this is an array reference, look for an outer field reference. */
1736 else if (TREE_CODE (t) == ARRAY_REF)
1737 {
1738 tree off_tree = size_zero_node;
1b1838b6
JW
1739 /* We can't modify t, because we use it at the end of the
1740 function. */
1741 tree t2 = t;
998d7deb
RH
1742
1743 do
1744 {
1b1838b6 1745 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1746 tree low_bound = array_ref_low_bound (t2);
1747 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1748
1749 /* We assume all arrays have sizes that are a multiple of a byte.
1750 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1751 index, then convert to sizetype and multiply by the size of
1752 the array element. */
1753 if (! integer_zerop (low_bound))
4845b383
KH
1754 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1755 index, low_bound);
2567406a 1756
44de5aeb 1757 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1758 size_binop (MULT_EXPR,
1759 fold_convert (sizetype,
1760 index),
44de5aeb
RK
1761 unit_size),
1762 off_tree);
1b1838b6 1763 t2 = TREE_OPERAND (t2, 0);
998d7deb 1764 }
1b1838b6 1765 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1766
1b1838b6 1767 if (DECL_P (t2))
c67a1cf6 1768 {
1b1838b6 1769 expr = t2;
40cb04f1 1770 offset = NULL;
c67a1cf6 1771 if (host_integerp (off_tree, 1))
40cb04f1
RH
1772 {
1773 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1774 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1b1838b6 1775 align = DECL_ALIGN (t2);
fc555370 1776 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
40cb04f1 1777 align = aoff;
df96b059 1778 align_computed = true;
40cb04f1 1779 offset = GEN_INT (ioff);
6f1087be 1780 apply_bitpos = bitpos;
40cb04f1 1781 }
c67a1cf6 1782 }
1b1838b6 1783 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1784 {
1b1838b6 1785 expr = component_ref_for_mem_expr (t2);
998d7deb 1786 if (host_integerp (off_tree, 1))
6f1087be
RH
1787 {
1788 offset = GEN_INT (tree_low_cst (off_tree, 1));
1789 apply_bitpos = bitpos;
1790 }
998d7deb
RH
1791 /* ??? Any reason the field size would be different than
1792 the size we got from the type? */
1793 }
c67a1cf6 1794 else if (flag_argument_noalias > 1
1b096a0a 1795 && (INDIRECT_REF_P (t2))
1b1838b6 1796 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
c67a1cf6 1797 {
1b1838b6 1798 expr = t2;
c67a1cf6
RH
1799 offset = NULL;
1800 }
1801 }
1802
1803 /* If this is a Fortran indirect argument reference, record the
1804 parameter decl. */
1805 else if (flag_argument_noalias > 1
1b096a0a 1806 && (INDIRECT_REF_P (t))
c67a1cf6
RH
1807 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1808 {
1809 expr = t;
1810 offset = NULL;
998d7deb 1811 }
df96b059
JJ
1812
1813 if (!align_computed && !INDIRECT_REF_P (t))
1814 {
1815 unsigned int obj_align
1816 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1817 align = MAX (align, obj_align);
1818 }
8ac61af7
RK
1819 }
1820
15c812e3 1821 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1822 bit position offset. Similarly, increase the size of the accessed
1823 object to contain the negative offset. */
6f1087be 1824 if (apply_bitpos)
8c317c5f
RH
1825 {
1826 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1827 if (size)
1828 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1829 }
6f1087be 1830
7ccf35ed
DN
1831 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1832 {
fa10beec 1833 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
7ccf35ed
DN
1834 we're overlapping. */
1835 offset = NULL;
1836 expr = NULL;
1837 }
1838
8ac61af7 1839 /* Now set the attributes we computed above. */
10b76d73 1840 MEM_ATTRS (ref)
998d7deb 1841 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
8ac61af7
RK
1842
1843 /* If this is already known to be a scalar or aggregate, we are done. */
1844 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
738cc472
RK
1845 return;
1846
8ac61af7
RK
1847 /* If it is a reference into an aggregate, this is part of an aggregate.
1848 Otherwise we don't know. */
173b24b9
RK
1849 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1850 || TREE_CODE (t) == ARRAY_RANGE_REF
1851 || TREE_CODE (t) == BIT_FIELD_REF)
1852 MEM_IN_STRUCT_P (ref) = 1;
1853}
1854
6f1087be 1855void
502b8322 1856set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1857{
1858 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1859}
1860
895a8136 1861/* Set MEM to the decl that REG refers to. */
a560d4d4
JH
1862
1863void
502b8322 1864set_mem_attrs_from_reg (rtx mem, rtx reg)
a560d4d4
JH
1865{
1866 MEM_ATTRS (mem)
1867 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1868 GEN_INT (REG_OFFSET (reg)),
1869 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1870}
1871
173b24b9
RK
1872/* Set the alias set of MEM to SET. */
1873
1874void
4862826d 1875set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1876{
68252e27 1877#ifdef ENABLE_CHECKING
173b24b9 1878 /* If the new and old alias sets don't conflict, something is wrong. */
5b0264cb 1879 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
173b24b9
RK
1880#endif
1881
998d7deb 1882 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1883 MEM_SIZE (mem), MEM_ALIGN (mem),
1884 GET_MODE (mem));
173b24b9 1885}
738cc472 1886
d022d93e 1887/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1888
1889void
502b8322 1890set_mem_align (rtx mem, unsigned int align)
738cc472 1891{
998d7deb 1892 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1893 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1894 GET_MODE (mem));
738cc472 1895}
1285011e 1896
998d7deb 1897/* Set the expr for MEM to EXPR. */
1285011e
RK
1898
1899void
502b8322 1900set_mem_expr (rtx mem, tree expr)
1285011e
RK
1901{
1902 MEM_ATTRS (mem)
998d7deb 1903 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1904 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1905}
998d7deb
RH
1906
1907/* Set the offset of MEM to OFFSET. */
1908
1909void
502b8322 1910set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1911{
1912 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1913 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1914 GET_MODE (mem));
35aff10b
AM
1915}
1916
1917/* Set the size of MEM to SIZE. */
1918
1919void
502b8322 1920set_mem_size (rtx mem, rtx size)
35aff10b
AM
1921{
1922 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1923 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1924 GET_MODE (mem));
998d7deb 1925}
173b24b9 1926\f
738cc472
RK
1927/* Return a memory reference like MEMREF, but with its mode changed to MODE
1928 and its address changed to ADDR. (VOIDmode means don't change the mode.
1929 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1930 returned memory location is required to be valid. The memory
1931 attributes are not changed. */
23b2ce53 1932
738cc472 1933static rtx
502b8322 1934change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1935{
60564289 1936 rtx new_rtx;
23b2ce53 1937
5b0264cb 1938 gcc_assert (MEM_P (memref));
23b2ce53
RS
1939 if (mode == VOIDmode)
1940 mode = GET_MODE (memref);
1941 if (addr == 0)
1942 addr = XEXP (memref, 0);
a74ff877
JH
1943 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1944 && (!validate || memory_address_p (mode, addr)))
1945 return memref;
23b2ce53 1946
f1ec5147 1947 if (validate)
23b2ce53 1948 {
f1ec5147 1949 if (reload_in_progress || reload_completed)
5b0264cb 1950 gcc_assert (memory_address_p (mode, addr));
f1ec5147
RK
1951 else
1952 addr = memory_address (mode, addr);
23b2ce53 1953 }
750c9258 1954
9b04c6a8
RK
1955 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1956 return memref;
1957
60564289
KG
1958 new_rtx = gen_rtx_MEM (mode, addr);
1959 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1960 return new_rtx;
23b2ce53 1961}
792760b9 1962
738cc472
RK
1963/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1964 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
1965
1966rtx
502b8322 1967change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 1968{
60564289
KG
1969 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1970 enum machine_mode mmode = GET_MODE (new_rtx);
4e44c1ef
JJ
1971 unsigned int align;
1972
1973 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1974 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
c2f7bcc3 1975
fdb1c7b3 1976 /* If there are no changes, just return the original memory reference. */
60564289 1977 if (new_rtx == memref)
4e44c1ef
JJ
1978 {
1979 if (MEM_ATTRS (memref) == 0
1980 || (MEM_EXPR (memref) == NULL
1981 && MEM_OFFSET (memref) == NULL
1982 && MEM_SIZE (memref) == size
1983 && MEM_ALIGN (memref) == align))
60564289 1984 return new_rtx;
4e44c1ef 1985
60564289
KG
1986 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1987 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 1988 }
fdb1c7b3 1989
60564289 1990 MEM_ATTRS (new_rtx)
4e44c1ef 1991 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
823e3574 1992
60564289 1993 return new_rtx;
f4ef873c 1994}
792760b9 1995
738cc472
RK
1996/* Return a memory reference like MEMREF, but with its mode changed
1997 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
1998 nonzero, the memory address is forced to be valid.
1999 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2000 and caller is responsible for adjusting MEMREF base register. */
f1ec5147
RK
2001
2002rtx
502b8322
AJ
2003adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2004 int validate, int adjust)
f1ec5147 2005{
823e3574 2006 rtx addr = XEXP (memref, 0);
60564289 2007 rtx new_rtx;
738cc472 2008 rtx memoffset = MEM_OFFSET (memref);
10b76d73 2009 rtx size = 0;
738cc472 2010 unsigned int memalign = MEM_ALIGN (memref);
823e3574 2011
fdb1c7b3
JH
2012 /* If there are no changes, just return the original memory reference. */
2013 if (mode == GET_MODE (memref) && !offset
2014 && (!validate || memory_address_p (mode, addr)))
2015 return memref;
2016
d14419e4 2017 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2018 This may happen even if offset is nonzero -- consider
d14419e4
RH
2019 (plus (plus reg reg) const_int) -- so do this always. */
2020 addr = copy_rtx (addr);
2021
4a78c787
RH
2022 if (adjust)
2023 {
2024 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2025 object, we can merge it into the LO_SUM. */
2026 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2027 && offset >= 0
2028 && (unsigned HOST_WIDE_INT) offset
2029 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2030 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2031 plus_constant (XEXP (addr, 1), offset));
2032 else
2033 addr = plus_constant (addr, offset);
2034 }
823e3574 2035
60564289 2036 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472
RK
2037
2038 /* Compute the new values of the memory attributes due to this adjustment.
2039 We add the offsets and update the alignment. */
2040 if (memoffset)
2041 memoffset = GEN_INT (offset + INTVAL (memoffset));
2042
03bf2c23
RK
2043 /* Compute the new alignment by taking the MIN of the alignment and the
2044 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2045 if zero. */
2046 if (offset != 0)
3bf1e984
RK
2047 memalign
2048 = MIN (memalign,
2049 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
738cc472 2050
10b76d73 2051 /* We can compute the size in a number of ways. */
60564289
KG
2052 if (GET_MODE (new_rtx) != BLKmode)
2053 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
10b76d73
RK
2054 else if (MEM_SIZE (memref))
2055 size = plus_constant (MEM_SIZE (memref), -offset);
2056
60564289
KG
2057 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2058 memoffset, size, memalign, GET_MODE (new_rtx));
738cc472
RK
2059
2060 /* At some point, we should validate that this offset is within the object,
2061 if all the appropriate values are known. */
60564289 2062 return new_rtx;
f1ec5147
RK
2063}
2064
630036c6
JJ
2065/* Return a memory reference like MEMREF, but with its mode changed
2066 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2067 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2068 nonzero, the memory address is forced to be valid. */
2069
2070rtx
502b8322
AJ
2071adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2072 HOST_WIDE_INT offset, int validate)
630036c6
JJ
2073{
2074 memref = change_address_1 (memref, VOIDmode, addr, validate);
2075 return adjust_address_1 (memref, mode, offset, validate, 0);
2076}
2077
8ac61af7
RK
2078/* Return a memory reference like MEMREF, but whose address is changed by
2079 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2080 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2081
2082rtx
502b8322 2083offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2084{
60564289 2085 rtx new_rtx, addr = XEXP (memref, 0);
e3c8ea67 2086
60564289 2087 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
e3c8ea67 2088
68252e27 2089 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2090 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2091
2092 However, if we did go and rearrange things, we can wind up not
2093 being able to recognize the magic around pic_offset_table_rtx.
2094 This stuff is fragile, and is yet another example of why it is
2095 bad to expose PIC machinery too early. */
60564289 2096 if (! memory_address_p (GET_MODE (memref), new_rtx)
e3c8ea67
RH
2097 && GET_CODE (addr) == PLUS
2098 && XEXP (addr, 0) == pic_offset_table_rtx)
2099 {
2100 addr = force_reg (GET_MODE (addr), addr);
60564289 2101 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
e3c8ea67
RH
2102 }
2103
60564289
KG
2104 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2105 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2106
fdb1c7b3 2107 /* If there are no changes, just return the original memory reference. */
60564289
KG
2108 if (new_rtx == memref)
2109 return new_rtx;
fdb1c7b3 2110
0d4903b8
RK
2111 /* Update the alignment to reflect the offset. Reset the offset, which
2112 we don't know. */
60564289 2113 MEM_ATTRS (new_rtx)
2cc2d4bb 2114 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
9ceca302 2115 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
60564289
KG
2116 GET_MODE (new_rtx));
2117 return new_rtx;
0d4903b8 2118}
68252e27 2119
792760b9
RK
2120/* Return a memory reference like MEMREF, but with its address changed to
2121 ADDR. The caller is asserting that the actual piece of memory pointed
2122 to is the same, just the form of the address is being changed, such as
2123 by putting something into a register. */
2124
2125rtx
502b8322 2126replace_equiv_address (rtx memref, rtx addr)
792760b9 2127{
738cc472
RK
2128 /* change_address_1 copies the memory attribute structure without change
2129 and that's exactly what we want here. */
40c0668b 2130 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2131 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2132}
738cc472 2133
f1ec5147
RK
2134/* Likewise, but the reference is not required to be valid. */
2135
2136rtx
502b8322 2137replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2138{
f1ec5147
RK
2139 return change_address_1 (memref, VOIDmode, addr, 0);
2140}
e7dfe4bb
RH
2141
2142/* Return a memory reference like MEMREF, but with its mode widened to
2143 MODE and offset by OFFSET. This would be used by targets that e.g.
2144 cannot issue QImode memory operations and have to use SImode memory
2145 operations plus masking logic. */
2146
2147rtx
502b8322 2148widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2149{
60564289
KG
2150 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2151 tree expr = MEM_EXPR (new_rtx);
2152 rtx memoffset = MEM_OFFSET (new_rtx);
e7dfe4bb
RH
2153 unsigned int size = GET_MODE_SIZE (mode);
2154
fdb1c7b3 2155 /* If there are no changes, just return the original memory reference. */
60564289
KG
2156 if (new_rtx == memref)
2157 return new_rtx;
fdb1c7b3 2158
e7dfe4bb
RH
2159 /* If we don't know what offset we were at within the expression, then
2160 we can't know if we've overstepped the bounds. */
fa1591cb 2161 if (! memoffset)
e7dfe4bb
RH
2162 expr = NULL_TREE;
2163
2164 while (expr)
2165 {
2166 if (TREE_CODE (expr) == COMPONENT_REF)
2167 {
2168 tree field = TREE_OPERAND (expr, 1);
44de5aeb 2169 tree offset = component_ref_field_offset (expr);
e7dfe4bb
RH
2170
2171 if (! DECL_SIZE_UNIT (field))
2172 {
2173 expr = NULL_TREE;
2174 break;
2175 }
2176
2177 /* Is the field at least as large as the access? If so, ok,
2178 otherwise strip back to the containing structure. */
03667700
RK
2179 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2180 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
e7dfe4bb
RH
2181 && INTVAL (memoffset) >= 0)
2182 break;
2183
44de5aeb 2184 if (! host_integerp (offset, 1))
e7dfe4bb
RH
2185 {
2186 expr = NULL_TREE;
2187 break;
2188 }
2189
2190 expr = TREE_OPERAND (expr, 0);
44de5aeb
RK
2191 memoffset
2192 = (GEN_INT (INTVAL (memoffset)
2193 + tree_low_cst (offset, 1)
2194 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2195 / BITS_PER_UNIT)));
e7dfe4bb
RH
2196 }
2197 /* Similarly for the decl. */
2198 else if (DECL_P (expr)
2199 && DECL_SIZE_UNIT (expr)
45f79783 2200 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
e7dfe4bb
RH
2201 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2202 && (! memoffset || INTVAL (memoffset) >= 0))
2203 break;
2204 else
2205 {
2206 /* The widened memory access overflows the expression, which means
2207 that it could alias another expression. Zap it. */
2208 expr = NULL_TREE;
2209 break;
2210 }
2211 }
2212
2213 if (! expr)
2214 memoffset = NULL_RTX;
2215
2216 /* The widened memory may alias other stuff, so zap the alias set. */
2217 /* ??? Maybe use get_alias_set on any remaining expression. */
2218
60564289
KG
2219 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2220 MEM_ALIGN (new_rtx), mode);
e7dfe4bb 2221
60564289 2222 return new_rtx;
e7dfe4bb 2223}
23b2ce53 2224\f
f6129d66
RH
2225/* A fake decl that is used as the MEM_EXPR of spill slots. */
2226static GTY(()) tree spill_slot_decl;
2227
3d7e23f6
RH
2228tree
2229get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2230{
2231 tree d = spill_slot_decl;
2232 rtx rd;
2233
3d7e23f6 2234 if (d || !force_build_p)
f6129d66
RH
2235 return d;
2236
2237 d = build_decl (VAR_DECL, get_identifier ("%sfp"), void_type_node);
2238 DECL_ARTIFICIAL (d) = 1;
2239 DECL_IGNORED_P (d) = 1;
2240 TREE_USED (d) = 1;
2241 TREE_THIS_NOTRAP (d) = 1;
2242 spill_slot_decl = d;
2243
2244 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2245 MEM_NOTRAP_P (rd) = 1;
2246 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2247 NULL_RTX, 0, BLKmode);
2248 SET_DECL_RTL (d, rd);
2249
2250 return d;
2251}
2252
2253/* Given MEM, a result from assign_stack_local, fill in the memory
2254 attributes as appropriate for a register allocator spill slot.
2255 These slots are not aliasable by other memory. We arrange for
2256 them all to use a single MEM_EXPR, so that the aliasing code can
2257 work properly in the case of shared spill slots. */
2258
2259void
2260set_mem_attrs_for_spill (rtx mem)
2261{
2262 alias_set_type alias;
2263 rtx addr, offset;
2264 tree expr;
2265
3d7e23f6 2266 expr = get_spill_slot_decl (true);
f6129d66
RH
2267 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2268
2269 /* We expect the incoming memory to be of the form:
2270 (mem:MODE (plus (reg sfp) (const_int offset)))
2271 with perhaps the plus missing for offset = 0. */
2272 addr = XEXP (mem, 0);
2273 offset = const0_rtx;
2274 if (GET_CODE (addr) == PLUS
2275 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2276 offset = XEXP (addr, 1);
2277
2278 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2279 MEM_SIZE (mem), MEM_ALIGN (mem),
2280 GET_MODE (mem));
2281 MEM_NOTRAP_P (mem) = 1;
2282}
2283\f
23b2ce53
RS
2284/* Return a newly created CODE_LABEL rtx with a unique label number. */
2285
2286rtx
502b8322 2287gen_label_rtx (void)
23b2ce53 2288{
0dc36574 2289 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2290 NULL, label_num++, NULL);
23b2ce53
RS
2291}
2292\f
2293/* For procedure integration. */
2294
23b2ce53 2295/* Install new pointers to the first and last insns in the chain.
86fe05e0 2296 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2297 Used for an inline-procedure after copying the insn chain. */
2298
2299void
502b8322 2300set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2301{
86fe05e0
RK
2302 rtx insn;
2303
23b2ce53
RS
2304 first_insn = first;
2305 last_insn = last;
86fe05e0
RK
2306 cur_insn_uid = 0;
2307
2308 for (insn = first; insn; insn = NEXT_INSN (insn))
2309 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2310
2311 cur_insn_uid++;
23b2ce53 2312}
23b2ce53 2313\f
750c9258 2314/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2315 structure. This routine should only be called once. */
23b2ce53 2316
fd743bc1 2317static void
b4aaa77b 2318unshare_all_rtl_1 (rtx insn)
23b2ce53 2319{
d1b81779 2320 /* Unshare just about everything else. */
2c07f13b 2321 unshare_all_rtl_in_chain (insn);
750c9258 2322
23b2ce53
RS
2323 /* Make sure the addresses of stack slots found outside the insn chain
2324 (such as, in DECL_RTL of a variable) are not shared
2325 with the insn chain.
2326
2327 This special care is necessary when the stack slot MEM does not
2328 actually appear in the insn chain. If it does appear, its address
2329 is unshared from all else at that point. */
242b0ce6 2330 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2331}
2332
750c9258 2333/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2334 structure, again. This is a fairly expensive thing to do so it
2335 should be done sparingly. */
2336
2337void
502b8322 2338unshare_all_rtl_again (rtx insn)
d1b81779
GK
2339{
2340 rtx p;
624c87aa
RE
2341 tree decl;
2342
d1b81779 2343 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2344 if (INSN_P (p))
d1b81779
GK
2345 {
2346 reset_used_flags (PATTERN (p));
2347 reset_used_flags (REG_NOTES (p));
d1b81779 2348 }
624c87aa 2349
2d4aecb3 2350 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2351 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2352
624c87aa
RE
2353 /* Make sure that virtual parameters are not shared. */
2354 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
5eb2a9f2 2355 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2356
2357 reset_used_flags (stack_slot_list);
2358
b4aaa77b 2359 unshare_all_rtl_1 (insn);
fd743bc1
PB
2360}
2361
c2924966 2362unsigned int
fd743bc1
PB
2363unshare_all_rtl (void)
2364{
b4aaa77b 2365 unshare_all_rtl_1 (get_insns ());
c2924966 2366 return 0;
d1b81779
GK
2367}
2368
8ddbbcae 2369struct rtl_opt_pass pass_unshare_all_rtl =
ef330312 2370{
8ddbbcae
JH
2371 {
2372 RTL_PASS,
defb77dc 2373 "unshare", /* name */
ef330312
PB
2374 NULL, /* gate */
2375 unshare_all_rtl, /* execute */
2376 NULL, /* sub */
2377 NULL, /* next */
2378 0, /* static_pass_number */
2379 0, /* tv_id */
2380 0, /* properties_required */
2381 0, /* properties_provided */
2382 0, /* properties_destroyed */
2383 0, /* todo_flags_start */
8ddbbcae
JH
2384 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2385 }
ef330312
PB
2386};
2387
2388
2c07f13b
JH
2389/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2390 Recursively does the same for subexpressions. */
2391
2392static void
2393verify_rtx_sharing (rtx orig, rtx insn)
2394{
2395 rtx x = orig;
2396 int i;
2397 enum rtx_code code;
2398 const char *format_ptr;
2399
2400 if (x == 0)
2401 return;
2402
2403 code = GET_CODE (x);
2404
2405 /* These types may be freely shared. */
2406
2407 switch (code)
2408 {
2409 case REG:
2c07f13b
JH
2410 case CONST_INT:
2411 case CONST_DOUBLE:
091a3ac7 2412 case CONST_FIXED:
2c07f13b
JH
2413 case CONST_VECTOR:
2414 case SYMBOL_REF:
2415 case LABEL_REF:
2416 case CODE_LABEL:
2417 case PC:
2418 case CC0:
2419 case SCRATCH:
2c07f13b 2420 return;
3e89ed8d
JH
2421 /* SCRATCH must be shared because they represent distinct values. */
2422 case CLOBBER:
2423 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2424 return;
2425 break;
2c07f13b
JH
2426
2427 case CONST:
6fb5fa3c 2428 if (shared_const_p (orig))
2c07f13b
JH
2429 return;
2430 break;
2431
2432 case MEM:
2433 /* A MEM is allowed to be shared if its address is constant. */
2434 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2435 || reload_completed || reload_in_progress)
2436 return;
2437
2438 break;
2439
2440 default:
2441 break;
2442 }
2443
2444 /* This rtx may not be shared. If it has already been seen,
2445 replace it with a copy of itself. */
1a2caa7a 2446#ifdef ENABLE_CHECKING
2c07f13b
JH
2447 if (RTX_FLAG (x, used))
2448 {
ab532386 2449 error ("invalid rtl sharing found in the insn");
2c07f13b 2450 debug_rtx (insn);
ab532386 2451 error ("shared rtx");
2c07f13b 2452 debug_rtx (x);
ab532386 2453 internal_error ("internal consistency failure");
2c07f13b 2454 }
1a2caa7a
NS
2455#endif
2456 gcc_assert (!RTX_FLAG (x, used));
2457
2c07f13b
JH
2458 RTX_FLAG (x, used) = 1;
2459
6614fd40 2460 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2461
2462 format_ptr = GET_RTX_FORMAT (code);
2463
2464 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2465 {
2466 switch (*format_ptr++)
2467 {
2468 case 'e':
2469 verify_rtx_sharing (XEXP (x, i), insn);
2470 break;
2471
2472 case 'E':
2473 if (XVEC (x, i) != NULL)
2474 {
2475 int j;
2476 int len = XVECLEN (x, i);
2477
2478 for (j = 0; j < len; j++)
2479 {
1a2caa7a
NS
2480 /* We allow sharing of ASM_OPERANDS inside single
2481 instruction. */
2c07f13b 2482 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2483 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2484 == ASM_OPERANDS))
2c07f13b
JH
2485 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2486 else
2487 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2488 }
2489 }
2490 break;
2491 }
2492 }
2493 return;
2494}
2495
ba228239 2496/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2497 sharing in between the subexpressions. */
2498
2499void
2500verify_rtl_sharing (void)
2501{
2502 rtx p;
2503
2504 for (p = get_insns (); p; p = NEXT_INSN (p))
2505 if (INSN_P (p))
2506 {
2507 reset_used_flags (PATTERN (p));
2508 reset_used_flags (REG_NOTES (p));
2954a813
KK
2509 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2510 {
2511 int i;
2512 rtx q, sequence = PATTERN (p);
2513
2514 for (i = 0; i < XVECLEN (sequence, 0); i++)
2515 {
2516 q = XVECEXP (sequence, 0, i);
2517 gcc_assert (INSN_P (q));
2518 reset_used_flags (PATTERN (q));
2519 reset_used_flags (REG_NOTES (q));
2954a813
KK
2520 }
2521 }
2c07f13b
JH
2522 }
2523
2524 for (p = get_insns (); p; p = NEXT_INSN (p))
2525 if (INSN_P (p))
2526 {
2527 verify_rtx_sharing (PATTERN (p), p);
2528 verify_rtx_sharing (REG_NOTES (p), p);
2c07f13b
JH
2529 }
2530}
2531
d1b81779
GK
2532/* Go through all the RTL insn bodies and copy any invalid shared structure.
2533 Assumes the mark bits are cleared at entry. */
2534
2c07f13b
JH
2535void
2536unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2537{
2538 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2539 if (INSN_P (insn))
d1b81779
GK
2540 {
2541 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2542 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
d1b81779
GK
2543 }
2544}
2545
2d4aecb3 2546/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2547 shared. We never replace the DECL_RTLs themselves with a copy,
2548 but expressions mentioned into a DECL_RTL cannot be shared with
2549 expressions in the instruction stream.
2550
2551 Note that reload may convert pseudo registers into memories in-place.
2552 Pseudo registers are always shared, but MEMs never are. Thus if we
2553 reset the used flags on MEMs in the instruction stream, we must set
2554 them again on MEMs that appear in DECL_RTLs. */
2555
2d4aecb3 2556static void
5eb2a9f2 2557set_used_decls (tree blk)
2d4aecb3
AO
2558{
2559 tree t;
2560
2561 /* Mark decls. */
2562 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c 2563 if (DECL_RTL_SET_P (t))
5eb2a9f2 2564 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2565
2566 /* Now process sub-blocks. */
87caf699 2567 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2568 set_used_decls (t);
2d4aecb3
AO
2569}
2570
23b2ce53 2571/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2572 Recursively does the same for subexpressions. Uses
2573 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2574
2575rtx
502b8322 2576copy_rtx_if_shared (rtx orig)
23b2ce53 2577{
32b32b16
AP
2578 copy_rtx_if_shared_1 (&orig);
2579 return orig;
2580}
2581
ff954f39
AP
2582/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2583 use. Recursively does the same for subexpressions. */
2584
32b32b16
AP
2585static void
2586copy_rtx_if_shared_1 (rtx *orig1)
2587{
2588 rtx x;
b3694847
SS
2589 int i;
2590 enum rtx_code code;
32b32b16 2591 rtx *last_ptr;
b3694847 2592 const char *format_ptr;
23b2ce53 2593 int copied = 0;
32b32b16
AP
2594 int length;
2595
2596 /* Repeat is used to turn tail-recursion into iteration. */
2597repeat:
2598 x = *orig1;
23b2ce53
RS
2599
2600 if (x == 0)
32b32b16 2601 return;
23b2ce53
RS
2602
2603 code = GET_CODE (x);
2604
2605 /* These types may be freely shared. */
2606
2607 switch (code)
2608 {
2609 case REG:
23b2ce53
RS
2610 case CONST_INT:
2611 case CONST_DOUBLE:
091a3ac7 2612 case CONST_FIXED:
69ef87e2 2613 case CONST_VECTOR:
23b2ce53 2614 case SYMBOL_REF:
2c07f13b 2615 case LABEL_REF:
23b2ce53
RS
2616 case CODE_LABEL:
2617 case PC:
2618 case CC0:
2619 case SCRATCH:
0f41302f 2620 /* SCRATCH must be shared because they represent distinct values. */
32b32b16 2621 return;
3e89ed8d
JH
2622 case CLOBBER:
2623 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2624 return;
2625 break;
23b2ce53 2626
b851ea09 2627 case CONST:
6fb5fa3c 2628 if (shared_const_p (x))
32b32b16 2629 return;
b851ea09
RK
2630 break;
2631
23b2ce53
RS
2632 case INSN:
2633 case JUMP_INSN:
2634 case CALL_INSN:
2635 case NOTE:
23b2ce53
RS
2636 case BARRIER:
2637 /* The chain of insns is not being copied. */
32b32b16 2638 return;
23b2ce53 2639
e9a25f70
JL
2640 default:
2641 break;
23b2ce53
RS
2642 }
2643
2644 /* This rtx may not be shared. If it has already been seen,
2645 replace it with a copy of itself. */
2646
2adc7f12 2647 if (RTX_FLAG (x, used))
23b2ce53 2648 {
aacd3885 2649 x = shallow_copy_rtx (x);
23b2ce53
RS
2650 copied = 1;
2651 }
2adc7f12 2652 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2653
2654 /* Now scan the subexpressions recursively.
2655 We can store any replaced subexpressions directly into X
2656 since we know X is not shared! Any vectors in X
2657 must be copied if X was copied. */
2658
2659 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2660 length = GET_RTX_LENGTH (code);
2661 last_ptr = NULL;
2662
2663 for (i = 0; i < length; i++)
23b2ce53
RS
2664 {
2665 switch (*format_ptr++)
2666 {
2667 case 'e':
32b32b16
AP
2668 if (last_ptr)
2669 copy_rtx_if_shared_1 (last_ptr);
2670 last_ptr = &XEXP (x, i);
23b2ce53
RS
2671 break;
2672
2673 case 'E':
2674 if (XVEC (x, i) != NULL)
2675 {
b3694847 2676 int j;
f0722107 2677 int len = XVECLEN (x, i);
32b32b16 2678
6614fd40
KH
2679 /* Copy the vector iff I copied the rtx and the length
2680 is nonzero. */
f0722107 2681 if (copied && len > 0)
8f985ec4 2682 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
32b32b16 2683
5d3cc252 2684 /* Call recursively on all inside the vector. */
f0722107 2685 for (j = 0; j < len; j++)
32b32b16
AP
2686 {
2687 if (last_ptr)
2688 copy_rtx_if_shared_1 (last_ptr);
2689 last_ptr = &XVECEXP (x, i, j);
2690 }
23b2ce53
RS
2691 }
2692 break;
2693 }
2694 }
32b32b16
AP
2695 *orig1 = x;
2696 if (last_ptr)
2697 {
2698 orig1 = last_ptr;
2699 goto repeat;
2700 }
2701 return;
23b2ce53
RS
2702}
2703
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  Recurses through every operand of X,
   turning tail recursion on the last 'e' operand into iteration.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* For the final rtx operand, loop back to the top instead of
	     recursing, to bound stack depth on long operand chains.  */
	  if (i == length-1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  reset_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}
2c07f13b
JH
2776
2777/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2778 to look for shared sub-parts. */
2779
2780void
2781set_used_flags (rtx x)
2782{
2783 int i, j;
2784 enum rtx_code code;
2785 const char *format_ptr;
2786
2787 if (x == 0)
2788 return;
2789
2790 code = GET_CODE (x);
2791
2792 /* These types may be freely shared so we needn't do any resetting
2793 for them. */
2794
2795 switch (code)
2796 {
2797 case REG:
2c07f13b
JH
2798 case CONST_INT:
2799 case CONST_DOUBLE:
091a3ac7 2800 case CONST_FIXED:
2c07f13b
JH
2801 case CONST_VECTOR:
2802 case SYMBOL_REF:
2803 case CODE_LABEL:
2804 case PC:
2805 case CC0:
2806 return;
2807
2808 case INSN:
2809 case JUMP_INSN:
2810 case CALL_INSN:
2811 case NOTE:
2812 case LABEL_REF:
2813 case BARRIER:
2814 /* The chain of insns is not being copied. */
2815 return;
2816
2817 default:
2818 break;
2819 }
2820
2821 RTX_FLAG (x, used) = 1;
2822
2823 format_ptr = GET_RTX_FORMAT (code);
2824 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2825 {
2826 switch (*format_ptr++)
2827 {
2828 case 'e':
2829 set_used_flags (XEXP (x, i));
2830 break;
2831
2832 case 'E':
2833 for (j = 0; j < XVECLEN (x, i); j++)
2834 set_used_flags (XVECEXP (x, i, j));
2835 break;
2836 }
2837 }
2838}
23b2ce53
RS
2839\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  /* Strip wrappers off OTHER to find the register or memory actually
     being stored into.  */
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  /* Copy X into a fresh pseudo when a store through OTHER could clobber
     it: either OTHER is a MEM and X is not a constant, register, or
     SUBREG, or OTHER is a hard register or a register mentioned in X.  */
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
2876\f
2877/* Emission of insns (adding them to the doubly-linked list). */
2878
/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  return first_insn;
}
2886
3dec4024
JH
/* Specify a new insn as the first in the chain.  INSN must not have a
   predecessor.  */

void
set_first_insn (rtx insn)
{
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}
2895
23b2ce53
RS
/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  return last_insn;
}
2903
/* Specify a new insn as the last in the chain.  INSN must not have a
   successor.  */

void
set_last_insn (rtx insn)
{
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}
2912
2913/* Return the last insn emitted, even if it is in a sequence now pushed. */
2914
2915rtx
502b8322 2916get_last_insn_anywhere (void)
23b2ce53
RS
2917{
2918 struct sequence_stack *stack;
2919 if (last_insn)
2920 return last_insn;
49ad7cfa 2921 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2922 if (stack->last != 0)
2923 return stack->last;
2924 return 0;
2925}
2926
2a496e8b
JDA
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      if (NOTE_P (insn))
	/* Skip forward over any leading notes.  */
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  /* A SEQUENCE is transparent: descend to its first element.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0, 0);
	}
    }

  return insn;
}
2952
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
	/* Skip backward over any trailing notes.  */
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  /* A SEQUENCE is transparent: descend to its last element.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}
2979
23b2ce53
RS
/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}
2987\f
2988/* Return the next insn. If it is a SEQUENCE, return the first insn
2989 of the sequence. */
2990
2991rtx
502b8322 2992next_insn (rtx insn)
23b2ce53 2993{
75547801
KG
2994 if (insn)
2995 {
2996 insn = NEXT_INSN (insn);
2997 if (insn && NONJUMP_INSN_P (insn)
2998 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2999 insn = XVECEXP (PATTERN (insn), 0, 0);
3000 }
23b2ce53 3001
75547801 3002 return insn;
23b2ce53
RS
3003}
3004
3005/* Return the previous insn. If it is a SEQUENCE, return the last insn
3006 of the sequence. */
3007
3008rtx
502b8322 3009previous_insn (rtx insn)
23b2ce53 3010{
75547801
KG
3011 if (insn)
3012 {
3013 insn = PREV_INSN (insn);
3014 if (insn && NONJUMP_INSN_P (insn)
3015 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3016 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3017 }
23b2ce53 3018
75547801 3019 return insn;
23b2ce53
RS
3020}
3021
3022/* Return the next insn after INSN that is not a NOTE. This routine does not
3023 look inside SEQUENCEs. */
3024
3025rtx
502b8322 3026next_nonnote_insn (rtx insn)
23b2ce53 3027{
75547801
KG
3028 while (insn)
3029 {
3030 insn = NEXT_INSN (insn);
3031 if (insn == 0 || !NOTE_P (insn))
3032 break;
3033 }
23b2ce53 3034
75547801 3035 return insn;
23b2ce53
RS
3036}
3037
3038/* Return the previous insn before INSN that is not a NOTE. This routine does
3039 not look inside SEQUENCEs. */
3040
3041rtx
502b8322 3042prev_nonnote_insn (rtx insn)
23b2ce53 3043{
75547801
KG
3044 while (insn)
3045 {
3046 insn = PREV_INSN (insn);
3047 if (insn == 0 || !NOTE_P (insn))
3048 break;
3049 }
23b2ce53 3050
75547801 3051 return insn;
23b2ce53
RS
3052}
3053
3054/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3055 or 0, if there is none. This routine does not look inside
0f41302f 3056 SEQUENCEs. */
23b2ce53
RS
3057
3058rtx
502b8322 3059next_real_insn (rtx insn)
23b2ce53 3060{
75547801
KG
3061 while (insn)
3062 {
3063 insn = NEXT_INSN (insn);
3064 if (insn == 0 || INSN_P (insn))
3065 break;
3066 }
23b2ce53 3067
75547801 3068 return insn;
23b2ce53
RS
3069}
3070
3071/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3072 or 0, if there is none. This routine does not look inside
3073 SEQUENCEs. */
3074
3075rtx
502b8322 3076prev_real_insn (rtx insn)
23b2ce53 3077{
75547801
KG
3078 while (insn)
3079 {
3080 insn = PREV_INSN (insn);
3081 if (insn == 0 || INSN_P (insn))
3082 break;
3083 }
23b2ce53 3084
75547801 3085 return insn;
23b2ce53
RS
3086}
3087
ee960939
OH
3088/* Return the last CALL_INSN in the current list, or 0 if there is none.
3089 This routine does not look inside SEQUENCEs. */
3090
3091rtx
502b8322 3092last_call_insn (void)
ee960939
OH
3093{
3094 rtx insn;
3095
3096 for (insn = get_last_insn ();
4b4bf941 3097 insn && !CALL_P (insn);
ee960939
OH
3098 insn = PREV_INSN (insn))
3099 ;
3100
3101 return insn;
3102}
3103
23b2ce53
RS
3104/* Find the next insn after INSN that really does something. This routine
3105 does not look inside SEQUENCEs. Until reload has completed, this is the
3106 same as next_real_insn. */
3107
69732dcb 3108int
4f588890 3109active_insn_p (const_rtx insn)
69732dcb 3110{
4b4bf941
JQ
3111 return (CALL_P (insn) || JUMP_P (insn)
3112 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3113 && (! reload_completed
3114 || (GET_CODE (PATTERN (insn)) != USE
3115 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3116}
3117
23b2ce53 3118rtx
502b8322 3119next_active_insn (rtx insn)
23b2ce53 3120{
75547801
KG
3121 while (insn)
3122 {
3123 insn = NEXT_INSN (insn);
3124 if (insn == 0 || active_insn_p (insn))
3125 break;
3126 }
23b2ce53 3127
75547801 3128 return insn;
23b2ce53
RS
3129}
3130
3131/* Find the last insn before INSN that really does something. This routine
3132 does not look inside SEQUENCEs. Until reload has completed, this is the
3133 same as prev_real_insn. */
3134
3135rtx
502b8322 3136prev_active_insn (rtx insn)
23b2ce53 3137{
75547801
KG
3138 while (insn)
3139 {
3140 insn = PREV_INSN (insn);
3141 if (insn == 0 || active_insn_p (insn))
3142 break;
3143 }
23b2ce53 3144
75547801 3145 return insn;
23b2ce53
RS
3146}
3147
3148/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3149
3150rtx
502b8322 3151next_label (rtx insn)
23b2ce53 3152{
75547801
KG
3153 while (insn)
3154 {
3155 insn = NEXT_INSN (insn);
3156 if (insn == 0 || LABEL_P (insn))
3157 break;
3158 }
23b2ce53 3159
75547801 3160 return insn;
23b2ce53
RS
3161}
3162
3163/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3164
3165rtx
502b8322 3166prev_label (rtx insn)
23b2ce53 3167{
75547801
KG
3168 while (insn)
3169 {
3170 insn = PREV_INSN (insn);
3171 if (insn == 0 || LABEL_P (insn))
3172 break;
3173 }
23b2ce53 3174
75547801 3175 return insn;
23b2ce53 3176}
6c2511d3
RS
3177
/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  /* Walk forward over the run of labels and notes (anything that is not
     a real insn), remembering the last label seen.  */
  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
23b2ce53
RS
3192\f
3193#ifdef HAVE_cc0
c572e5ba
JVA
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  /* The user of CC0 is assumed to be the next nonnote insn; if that is
     a SEQUENCE, the real user is its first element.  */
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  /* Cross-link the pair so either one can find the other.  */
  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}
3208
23b2ce53
RS
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  /* An explicit note takes precedence over chain position.  */
  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Only return the candidate if it actually mentions cc0.  */
  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}
3235
/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  /* With no note, the setter must be the immediately preceding nonnote
     insn; assert that it really does set cc0.  */
  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
3252#endif
e5bef2e4 3253
594f8779
RZ
3254#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  Callback for
   for_each_rtx: DATA is the register whose auto-increment is sought.
   Returns 1 when *XP is an autoinc of that register (stops the walk),
   0 to keep walking, and -1 to skip the sub-rtxes of a non-matching
   autoinc — presumably per the for_each_rtx protocol; confirm against
   its documentation.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
	return 1;
      break;

    default:
      /* RTX_AUTOINC class contains exactly the codes above.  */
      gcc_unreachable ();
    }
  return -1;
}
3283#endif
3284
e5bef2e4
HB
/* Increment the label uses for all labels present in rtx.  Recursively
   walks X and bumps LABEL_NUSES for each LABEL_REF to a CODE_LABEL.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  /* Recurse into all expression and vector operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
3308
23b2ce53
RS
3309\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* Export TRIAL's branch probability to the splitters through the
     global split_branch_probability, then reset it afterwards.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    /* Append TRIAL's usage list at the end of INSN's list.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  /* EH region notes go on every insn that can throw.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn)
		  || (flag_non_call_exceptions && INSN_P (insn)
		      && may_trap_p (PATTERN (insn))))
		add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  /* These notes belong on the calls of the sequence.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* These notes belong on the jumps of the sequence.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  /* Attach REG_INC to whichever new insn actually contains the
	     auto-increment of the noted register.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
3488\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  The insn is given a fresh uid
   and is not yet linked into any chain or basic block.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Catch a common misuse: a pattern that jumps must be emitted with
     emit_jump_insn, not emit_insn.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
3520
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.
   JUMP_LABEL starts out null; the caller is responsible for setting it.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
aff507f4 3540
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.
   CALL_INSN_FUNCTION_USAGE starts out empty.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
23b2ce53
RS
3560\f
3561/* Add INSN to the end of the doubly-linked list.
3562 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3563
3564void
502b8322 3565add_insn (rtx insn)
23b2ce53
RS
3566{
3567 PREV_INSN (insn) = last_insn;
3568 NEXT_INSN (insn) = 0;
3569
3570 if (NULL != last_insn)
3571 NEXT_INSN (last_insn) = insn;
3572
3573 if (NULL == first_insn)
3574 first_insn = insn;
3575
3576 last_insn = insn;
3577}
3578
a0ae8e8d
RK
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  BB is passed but the basic block is recomputed from AFTER
   below — NOTE(review): the parameter value itself appears unused.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      /* If NEXT is a SEQUENCE, its first element's back-link must be
	 kept in sync.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Keep basic-block bookkeeping up to date when AFTER belongs to one.  */
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
3639
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from before.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      /* If PREV is a SEQUENCE, its last element's forward link must be
	 kept in sync.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Infer the basic block from BEFORE when none was supplied.  */
  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
3703
6fb5fa3c
DB
3704
/* Replace INSN with a NOTE_INSN_DELETED note, keeping its slot in the
   insn chain, and tell the dataflow framework the insn is gone.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
3714
3715
89e99eea
DB
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  Updates first/last insn pointers, any pending
   sequence stack entries, and the insn's basic block boundaries.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      /* Keep a preceding SEQUENCE's internal forward link in sync.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      /* Keep a following SEQUENCE's internal back link in sync.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      gcc_assert (stack);
    }
  /* Fix up the boundaries of INSN's basic block, if it has one.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
	df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
3790
ee960939
OH
3791/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3792
3793void
502b8322 3794add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3795{
5b0264cb 3796 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3797
3798 /* Put the register usage information on the CALL. If there is already
3799 some usage information, put ours at the end. */
3800 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3801 {
3802 rtx link;
3803
3804 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3805 link = XEXP (link, 1))
3806 ;
3807
3808 XEXP (link, 1) = call_fusage;
3809 }
3810 else
3811 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3812}
3813
23b2ce53
RS
3814/* Delete all insns made since FROM.
3815 FROM becomes the new last instruction. */
3816
3817void
502b8322 3818delete_insns_since (rtx from)
23b2ce53
RS
3819{
3820 if (from == 0)
3821 first_insn = 0;
3822 else
3823 NEXT_INSN (from) = 0;
3824 last_insn = from;
3825}
3826
5dab5552
MS
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
3860
3c030e88
JH
/* Same as function above, but take care to update BB boundaries.
   Moves FROM..TO after AFTER, then fixes BB_END of both the source and
   destination blocks and reassigns each moved insn's block.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      /* If the range came from a different block, its old block may
	 now end at PREV.  */
      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Every moved insn now belongs to AFTER's block.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
3892
23b2ce53 3893\f
2f937369
DM
3894/* Emit insn(s) of given code and pattern
3895 at a specified place within the doubly-linked list.
23b2ce53 3896
2f937369
DM
3897 All of the emit_foo global entry points accept an object
3898 X which is either an insn list or a PATTERN of a single
3899 instruction.
23b2ce53 3900
2f937369
DM
3901 There are thus a few canonical ways to generate code and
3902 emit it at a specific place in the instruction stream. For
3903 example, consider the instruction named SPOT and the fact that
3904 we would like to emit some instructions before SPOT. We might
3905 do it like this:
23b2ce53 3906
2f937369
DM
3907 start_sequence ();
3908 ... emit the new instructions ...
3909 insns_head = get_insns ();
3910 end_sequence ();
23b2ce53 3911
2f937369 3912 emit_insn_before (insns_head, SPOT);
23b2ce53 3913
2f937369
DM
3914 It used to be common to generate SEQUENCE rtl instead, but that
3915 is a relic of the past which no longer occurs. The reason is that
3916 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3917 generated would almost certainly die right after it was created. */
23b2ce53 3918
2f937369 3919/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
3920
3921rtx
6fb5fa3c 3922emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
23b2ce53 3923{
2f937369 3924 rtx last = before;
b3694847 3925 rtx insn;
23b2ce53 3926
5b0264cb 3927 gcc_assert (before);
2f937369
DM
3928
3929 if (x == NULL_RTX)
3930 return last;
3931
3932 switch (GET_CODE (x))
23b2ce53 3933 {
2f937369
DM
3934 case INSN:
3935 case JUMP_INSN:
3936 case CALL_INSN:
3937 case CODE_LABEL:
3938 case BARRIER:
3939 case NOTE:
3940 insn = x;
3941 while (insn)
3942 {
3943 rtx next = NEXT_INSN (insn);
6fb5fa3c 3944 add_insn_before (insn, before, bb);
2f937369
DM
3945 last = insn;
3946 insn = next;
3947 }
3948 break;
3949
3950#ifdef ENABLE_RTL_CHECKING
3951 case SEQUENCE:
5b0264cb 3952 gcc_unreachable ();
2f937369
DM
3953 break;
3954#endif
3955
3956 default:
3957 last = make_insn_raw (x);
6fb5fa3c 3958 add_insn_before (last, before, bb);
2f937369 3959 break;
23b2ce53
RS
3960 }
3961
2f937369 3962 return last;
23b2ce53
RS
3963}
3964
2f937369 3965/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
3966 and output it before the instruction BEFORE. */
3967
3968rtx
a7102479 3969emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 3970{
d950dee3 3971 rtx insn, last = NULL_RTX;
aff507f4 3972
5b0264cb 3973 gcc_assert (before);
2f937369
DM
3974
3975 switch (GET_CODE (x))
aff507f4 3976 {
2f937369
DM
3977 case INSN:
3978 case JUMP_INSN:
3979 case CALL_INSN:
3980 case CODE_LABEL:
3981 case BARRIER:
3982 case NOTE:
3983 insn = x;
3984 while (insn)
3985 {
3986 rtx next = NEXT_INSN (insn);
6fb5fa3c 3987 add_insn_before (insn, before, NULL);
2f937369
DM
3988 last = insn;
3989 insn = next;
3990 }
3991 break;
3992
3993#ifdef ENABLE_RTL_CHECKING
3994 case SEQUENCE:
5b0264cb 3995 gcc_unreachable ();
2f937369
DM
3996 break;
3997#endif
3998
3999 default:
4000 last = make_jump_insn_raw (x);
6fb5fa3c 4001 add_insn_before (last, before, NULL);
2f937369 4002 break;
aff507f4
RK
4003 }
4004
2f937369 4005 return last;
23b2ce53
RS
4006}
4007
2f937369 4008/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4009 and output it before the instruction BEFORE. */
4010
4011rtx
a7102479 4012emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4013{
d950dee3 4014 rtx last = NULL_RTX, insn;
969d70ca 4015
5b0264cb 4016 gcc_assert (before);
2f937369
DM
4017
4018 switch (GET_CODE (x))
969d70ca 4019 {
2f937369
DM
4020 case INSN:
4021 case JUMP_INSN:
4022 case CALL_INSN:
4023 case CODE_LABEL:
4024 case BARRIER:
4025 case NOTE:
4026 insn = x;
4027 while (insn)
4028 {
4029 rtx next = NEXT_INSN (insn);
6fb5fa3c 4030 add_insn_before (insn, before, NULL);
2f937369
DM
4031 last = insn;
4032 insn = next;
4033 }
4034 break;
4035
4036#ifdef ENABLE_RTL_CHECKING
4037 case SEQUENCE:
5b0264cb 4038 gcc_unreachable ();
2f937369
DM
4039 break;
4040#endif
4041
4042 default:
4043 last = make_call_insn_raw (x);
6fb5fa3c 4044 add_insn_before (last, before, NULL);
2f937369 4045 break;
969d70ca
JH
4046 }
4047
2f937369 4048 return last;
969d70ca
JH
4049}
4050
23b2ce53 4051/* Make an insn of code BARRIER
e881bb1b 4052 and output it before the insn BEFORE. */
23b2ce53
RS
4053
4054rtx
502b8322 4055emit_barrier_before (rtx before)
23b2ce53 4056{
b3694847 4057 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4058
4059 INSN_UID (insn) = cur_insn_uid++;
4060
6fb5fa3c 4061 add_insn_before (insn, before, NULL);
23b2ce53
RS
4062 return insn;
4063}
4064
e881bb1b
RH
4065/* Emit the label LABEL before the insn BEFORE. */
4066
4067rtx
502b8322 4068emit_label_before (rtx label, rtx before)
e881bb1b
RH
4069{
4070 /* This can be called twice for the same label as a result of the
4071 confusion that follows a syntax error! So make it harmless. */
4072 if (INSN_UID (label) == 0)
4073 {
4074 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4075 add_insn_before (label, before, NULL);
e881bb1b
RH
4076 }
4077
4078 return label;
4079}
4080
23b2ce53
RS
4081/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4082
4083rtx
a38e7aa5 4084emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4085{
b3694847 4086 rtx note = rtx_alloc (NOTE);
23b2ce53 4087 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4088 NOTE_KIND (note) = subtype;
ba4f7968 4089 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4090 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4091
6fb5fa3c 4092 add_insn_before (note, before, NULL);
23b2ce53
RS
4093 return note;
4094}
4095\f
2f937369
DM
4096/* Helper for emit_insn_after, handles lists of instructions
4097 efficiently. */
23b2ce53 4098
2f937369 4099static rtx
6fb5fa3c 4100emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4101{
2f937369
DM
4102 rtx last;
4103 rtx after_after;
6fb5fa3c
DB
4104 if (!bb && !BARRIER_P (after))
4105 bb = BLOCK_FOR_INSN (after);
23b2ce53 4106
6fb5fa3c 4107 if (bb)
23b2ce53 4108 {
6fb5fa3c 4109 df_set_bb_dirty (bb);
2f937369 4110 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4111 if (!BARRIER_P (last))
6fb5fa3c
DB
4112 {
4113 set_block_for_insn (last, bb);
4114 df_insn_rescan (last);
4115 }
4b4bf941 4116 if (!BARRIER_P (last))
6fb5fa3c
DB
4117 {
4118 set_block_for_insn (last, bb);
4119 df_insn_rescan (last);
4120 }
a813c111
SB
4121 if (BB_END (bb) == after)
4122 BB_END (bb) = last;
23b2ce53
RS
4123 }
4124 else
2f937369
DM
4125 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4126 continue;
4127
4128 after_after = NEXT_INSN (after);
4129
4130 NEXT_INSN (after) = first;
4131 PREV_INSN (first) = after;
4132 NEXT_INSN (last) = after_after;
4133 if (after_after)
4134 PREV_INSN (after_after) = last;
4135
4136 if (after == last_insn)
4137 last_insn = last;
e855c69d 4138
2f937369
DM
4139 return last;
4140}
4141
6fb5fa3c
DB
4142/* Make X be output after the insn AFTER and set the BB of insn. If
4143 BB is NULL, an attempt is made to infer the BB from AFTER. */
2f937369
DM
4144
4145rtx
6fb5fa3c 4146emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
2f937369
DM
4147{
4148 rtx last = after;
4149
5b0264cb 4150 gcc_assert (after);
2f937369
DM
4151
4152 if (x == NULL_RTX)
4153 return last;
4154
4155 switch (GET_CODE (x))
23b2ce53 4156 {
2f937369
DM
4157 case INSN:
4158 case JUMP_INSN:
4159 case CALL_INSN:
4160 case CODE_LABEL:
4161 case BARRIER:
4162 case NOTE:
6fb5fa3c 4163 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4164 break;
4165
4166#ifdef ENABLE_RTL_CHECKING
4167 case SEQUENCE:
5b0264cb 4168 gcc_unreachable ();
2f937369
DM
4169 break;
4170#endif
4171
4172 default:
4173 last = make_insn_raw (x);
6fb5fa3c 4174 add_insn_after (last, after, bb);
2f937369 4175 break;
23b2ce53
RS
4176 }
4177
2f937369 4178 return last;
23b2ce53
RS
4179}
4180
255680cf 4181
2f937369 4182/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4183 and output it after the insn AFTER. */
4184
4185rtx
a7102479 4186emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4187{
2f937369 4188 rtx last;
23b2ce53 4189
5b0264cb 4190 gcc_assert (after);
2f937369
DM
4191
4192 switch (GET_CODE (x))
23b2ce53 4193 {
2f937369
DM
4194 case INSN:
4195 case JUMP_INSN:
4196 case CALL_INSN:
4197 case CODE_LABEL:
4198 case BARRIER:
4199 case NOTE:
6fb5fa3c 4200 last = emit_insn_after_1 (x, after, NULL);
2f937369
DM
4201 break;
4202
4203#ifdef ENABLE_RTL_CHECKING
4204 case SEQUENCE:
5b0264cb 4205 gcc_unreachable ();
2f937369
DM
4206 break;
4207#endif
4208
4209 default:
4210 last = make_jump_insn_raw (x);
6fb5fa3c 4211 add_insn_after (last, after, NULL);
2f937369 4212 break;
23b2ce53
RS
4213 }
4214
2f937369
DM
4215 return last;
4216}
4217
4218/* Make an instruction with body X and code CALL_INSN
4219 and output it after the instruction AFTER. */
4220
4221rtx
a7102479 4222emit_call_insn_after_noloc (rtx x, rtx after)
2f937369
DM
4223{
4224 rtx last;
4225
5b0264cb 4226 gcc_assert (after);
2f937369
DM
4227
4228 switch (GET_CODE (x))
4229 {
4230 case INSN:
4231 case JUMP_INSN:
4232 case CALL_INSN:
4233 case CODE_LABEL:
4234 case BARRIER:
4235 case NOTE:
6fb5fa3c 4236 last = emit_insn_after_1 (x, after, NULL);
2f937369
DM
4237 break;
4238
4239#ifdef ENABLE_RTL_CHECKING
4240 case SEQUENCE:
5b0264cb 4241 gcc_unreachable ();
2f937369
DM
4242 break;
4243#endif
4244
4245 default:
4246 last = make_call_insn_raw (x);
6fb5fa3c 4247 add_insn_after (last, after, NULL);
2f937369
DM
4248 break;
4249 }
4250
4251 return last;
23b2ce53
RS
4252}
4253
4254/* Make an insn of code BARRIER
4255 and output it after the insn AFTER. */
4256
4257rtx
502b8322 4258emit_barrier_after (rtx after)
23b2ce53 4259{
b3694847 4260 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4261
4262 INSN_UID (insn) = cur_insn_uid++;
4263
6fb5fa3c 4264 add_insn_after (insn, after, NULL);
23b2ce53
RS
4265 return insn;
4266}
4267
4268/* Emit the label LABEL after the insn AFTER. */
4269
4270rtx
502b8322 4271emit_label_after (rtx label, rtx after)
23b2ce53
RS
4272{
4273 /* This can be called twice for the same label
4274 as a result of the confusion that follows a syntax error!
4275 So make it harmless. */
4276 if (INSN_UID (label) == 0)
4277 {
4278 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4279 add_insn_after (label, after, NULL);
23b2ce53
RS
4280 }
4281
4282 return label;
4283}
4284
4285/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4286
4287rtx
a38e7aa5 4288emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4289{
b3694847 4290 rtx note = rtx_alloc (NOTE);
23b2ce53 4291 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4292 NOTE_KIND (note) = subtype;
ba4f7968 4293 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4294 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4295 add_insn_after (note, after, NULL);
23b2ce53
RS
4296 return note;
4297}
23b2ce53 4298\f
a7102479 4299/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4300rtx
502b8322 4301emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4302{
6fb5fa3c 4303 rtx last = emit_insn_after_noloc (pattern, after, NULL);
0d682900 4304
a7102479 4305 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4306 return last;
4307
2f937369
DM
4308 after = NEXT_INSN (after);
4309 while (1)
4310 {
a7102479 4311 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4312 INSN_LOCATOR (after) = loc;
2f937369
DM
4313 if (after == last)
4314 break;
4315 after = NEXT_INSN (after);
4316 }
0d682900
JH
4317 return last;
4318}
4319
a7102479
JH
4320/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4321rtx
4322emit_insn_after (rtx pattern, rtx after)
4323{
4324 if (INSN_P (after))
4325 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4326 else
6fb5fa3c 4327 return emit_insn_after_noloc (pattern, after, NULL);
a7102479
JH
4328}
4329
4330/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4331rtx
502b8322 4332emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4333{
a7102479 4334 rtx last = emit_jump_insn_after_noloc (pattern, after);
2f937369 4335
a7102479 4336 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4337 return last;
4338
2f937369
DM
4339 after = NEXT_INSN (after);
4340 while (1)
4341 {
a7102479 4342 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4343 INSN_LOCATOR (after) = loc;
2f937369
DM
4344 if (after == last)
4345 break;
4346 after = NEXT_INSN (after);
4347 }
0d682900
JH
4348 return last;
4349}
4350
a7102479
JH
4351/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4352rtx
4353emit_jump_insn_after (rtx pattern, rtx after)
4354{
4355 if (INSN_P (after))
4356 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4357 else
4358 return emit_jump_insn_after_noloc (pattern, after);
4359}
4360
4361/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4362rtx
502b8322 4363emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4364{
a7102479 4365 rtx last = emit_call_insn_after_noloc (pattern, after);
2f937369 4366
a7102479 4367 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4368 return last;
4369
2f937369
DM
4370 after = NEXT_INSN (after);
4371 while (1)
4372 {
a7102479 4373 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4374 INSN_LOCATOR (after) = loc;
2f937369
DM
4375 if (after == last)
4376 break;
4377 after = NEXT_INSN (after);
4378 }
0d682900
JH
4379 return last;
4380}
4381
a7102479
JH
4382/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4383rtx
4384emit_call_insn_after (rtx pattern, rtx after)
4385{
4386 if (INSN_P (after))
4387 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4388 else
4389 return emit_call_insn_after_noloc (pattern, after);
4390}
4391
4392/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4393rtx
502b8322 4394emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0d682900
JH
4395{
4396 rtx first = PREV_INSN (before);
6fb5fa3c 4397 rtx last = emit_insn_before_noloc (pattern, before, NULL);
a7102479
JH
4398
4399 if (pattern == NULL_RTX || !loc)
4400 return last;
4401
26cb3993
JH
4402 if (!first)
4403 first = get_insns ();
4404 else
4405 first = NEXT_INSN (first);
a7102479
JH
4406 while (1)
4407 {
4408 if (active_insn_p (first) && !INSN_LOCATOR (first))
4409 INSN_LOCATOR (first) = loc;
4410 if (first == last)
4411 break;
4412 first = NEXT_INSN (first);
4413 }
4414 return last;
4415}
4416
4417/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4418rtx
4419emit_insn_before (rtx pattern, rtx before)
4420{
4421 if (INSN_P (before))
4422 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4423 else
6fb5fa3c 4424 return emit_insn_before_noloc (pattern, before, NULL);
a7102479
JH
4425}
4426
4427/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4428rtx
4429emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4430{
4431 rtx first = PREV_INSN (before);
4432 rtx last = emit_jump_insn_before_noloc (pattern, before);
4433
4434 if (pattern == NULL_RTX)
4435 return last;
4436
4437 first = NEXT_INSN (first);
4438 while (1)
4439 {
4440 if (active_insn_p (first) && !INSN_LOCATOR (first))
4441 INSN_LOCATOR (first) = loc;
4442 if (first == last)
4443 break;
4444 first = NEXT_INSN (first);
4445 }
4446 return last;
4447}
4448
4449/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4450rtx
4451emit_jump_insn_before (rtx pattern, rtx before)
4452{
4453 if (INSN_P (before))
4454 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4455 else
4456 return emit_jump_insn_before_noloc (pattern, before);
4457}
4458
4459/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4460rtx
4461emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4462{
4463 rtx first = PREV_INSN (before);
4464 rtx last = emit_call_insn_before_noloc (pattern, before);
0d682900 4465
dd3adcf8
DJ
4466 if (pattern == NULL_RTX)
4467 return last;
4468
2f937369
DM
4469 first = NEXT_INSN (first);
4470 while (1)
4471 {
a7102479 4472 if (active_insn_p (first) && !INSN_LOCATOR (first))
0435312e 4473 INSN_LOCATOR (first) = loc;
2f937369
DM
4474 if (first == last)
4475 break;
4476 first = NEXT_INSN (first);
4477 }
0d682900
JH
4478 return last;
4479}
a7102479
JH
4480
4481/* like emit_call_insn_before_noloc,
4482 but set insn_locator according to before. */
4483rtx
4484emit_call_insn_before (rtx pattern, rtx before)
4485{
4486 if (INSN_P (before))
4487 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4488 else
4489 return emit_call_insn_before_noloc (pattern, before);
4490}
0d682900 4491\f
2f937369
DM
4492/* Take X and emit it at the end of the doubly-linked
4493 INSN list.
23b2ce53
RS
4494
4495 Returns the last insn emitted. */
4496
4497rtx
502b8322 4498emit_insn (rtx x)
23b2ce53 4499{
2f937369
DM
4500 rtx last = last_insn;
4501 rtx insn;
23b2ce53 4502
2f937369
DM
4503 if (x == NULL_RTX)
4504 return last;
23b2ce53 4505
2f937369
DM
4506 switch (GET_CODE (x))
4507 {
4508 case INSN:
4509 case JUMP_INSN:
4510 case CALL_INSN:
4511 case CODE_LABEL:
4512 case BARRIER:
4513 case NOTE:
4514 insn = x;
4515 while (insn)
23b2ce53 4516 {
2f937369 4517 rtx next = NEXT_INSN (insn);
23b2ce53 4518 add_insn (insn);
2f937369
DM
4519 last = insn;
4520 insn = next;
23b2ce53 4521 }
2f937369 4522 break;
23b2ce53 4523
2f937369
DM
4524#ifdef ENABLE_RTL_CHECKING
4525 case SEQUENCE:
5b0264cb 4526 gcc_unreachable ();
2f937369
DM
4527 break;
4528#endif
23b2ce53 4529
2f937369
DM
4530 default:
4531 last = make_insn_raw (x);
4532 add_insn (last);
4533 break;
23b2ce53
RS
4534 }
4535
4536 return last;
4537}
4538
2f937369
DM
4539/* Make an insn of code JUMP_INSN with pattern X
4540 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4541
4542rtx
502b8322 4543emit_jump_insn (rtx x)
23b2ce53 4544{
d950dee3 4545 rtx last = NULL_RTX, insn;
23b2ce53 4546
2f937369 4547 switch (GET_CODE (x))
23b2ce53 4548 {
2f937369
DM
4549 case INSN:
4550 case JUMP_INSN:
4551 case CALL_INSN:
4552 case CODE_LABEL:
4553 case BARRIER:
4554 case NOTE:
4555 insn = x;
4556 while (insn)
4557 {
4558 rtx next = NEXT_INSN (insn);
4559 add_insn (insn);
4560 last = insn;
4561 insn = next;
4562 }
4563 break;
e0a5c5eb 4564
2f937369
DM
4565#ifdef ENABLE_RTL_CHECKING
4566 case SEQUENCE:
5b0264cb 4567 gcc_unreachable ();
2f937369
DM
4568 break;
4569#endif
e0a5c5eb 4570
2f937369
DM
4571 default:
4572 last = make_jump_insn_raw (x);
4573 add_insn (last);
4574 break;
3c030e88 4575 }
e0a5c5eb
RS
4576
4577 return last;
4578}
4579
2f937369 4580/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4581 and add it to the end of the doubly-linked list. */
4582
4583rtx
502b8322 4584emit_call_insn (rtx x)
23b2ce53 4585{
2f937369
DM
4586 rtx insn;
4587
4588 switch (GET_CODE (x))
23b2ce53 4589 {
2f937369
DM
4590 case INSN:
4591 case JUMP_INSN:
4592 case CALL_INSN:
4593 case CODE_LABEL:
4594 case BARRIER:
4595 case NOTE:
4596 insn = emit_insn (x);
4597 break;
23b2ce53 4598
2f937369
DM
4599#ifdef ENABLE_RTL_CHECKING
4600 case SEQUENCE:
5b0264cb 4601 gcc_unreachable ();
2f937369
DM
4602 break;
4603#endif
23b2ce53 4604
2f937369
DM
4605 default:
4606 insn = make_call_insn_raw (x);
23b2ce53 4607 add_insn (insn);
2f937369 4608 break;
23b2ce53 4609 }
2f937369
DM
4610
4611 return insn;
23b2ce53
RS
4612}
4613
4614/* Add the label LABEL to the end of the doubly-linked list. */
4615
4616rtx
502b8322 4617emit_label (rtx label)
23b2ce53
RS
4618{
4619 /* This can be called twice for the same label
4620 as a result of the confusion that follows a syntax error!
4621 So make it harmless. */
4622 if (INSN_UID (label) == 0)
4623 {
4624 INSN_UID (label) = cur_insn_uid++;
4625 add_insn (label);
4626 }
4627 return label;
4628}
4629
4630/* Make an insn of code BARRIER
4631 and add it to the end of the doubly-linked list. */
4632
4633rtx
502b8322 4634emit_barrier (void)
23b2ce53 4635{
b3694847 4636 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4637 INSN_UID (barrier) = cur_insn_uid++;
4638 add_insn (barrier);
4639 return barrier;
4640}
4641
5f2fc772 4642/* Emit a copy of note ORIG. */
502b8322 4643
5f2fc772
NS
4644rtx
4645emit_note_copy (rtx orig)
4646{
4647 rtx note;
4648
5f2fc772
NS
4649 note = rtx_alloc (NOTE);
4650
4651 INSN_UID (note) = cur_insn_uid++;
4652 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4653 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4654 BLOCK_FOR_INSN (note) = NULL;
4655 add_insn (note);
4656
2e040219 4657 return note;
23b2ce53
RS
4658}
4659
2e040219
NS
4660/* Make an insn of code NOTE or type NOTE_NO
4661 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4662
4663rtx
a38e7aa5 4664emit_note (enum insn_note kind)
23b2ce53 4665{
b3694847 4666 rtx note;
23b2ce53 4667
23b2ce53
RS
4668 note = rtx_alloc (NOTE);
4669 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4670 NOTE_KIND (note) = kind;
dd107e66 4671 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4672 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4673 add_insn (note);
4674 return note;
4675}
4676
c41c1387
RS
4677/* Emit a clobber of lvalue X. */
4678
4679rtx
4680emit_clobber (rtx x)
4681{
4682 /* CONCATs should not appear in the insn stream. */
4683 if (GET_CODE (x) == CONCAT)
4684 {
4685 emit_clobber (XEXP (x, 0));
4686 return emit_clobber (XEXP (x, 1));
4687 }
4688 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4689}
4690
4691/* Return a sequence of insns to clobber lvalue X. */
4692
4693rtx
4694gen_clobber (rtx x)
4695{
4696 rtx seq;
4697
4698 start_sequence ();
4699 emit_clobber (x);
4700 seq = get_insns ();
4701 end_sequence ();
4702 return seq;
4703}
4704
4705/* Emit a use of rvalue X. */
4706
4707rtx
4708emit_use (rtx x)
4709{
4710 /* CONCATs should not appear in the insn stream. */
4711 if (GET_CODE (x) == CONCAT)
4712 {
4713 emit_use (XEXP (x, 0));
4714 return emit_use (XEXP (x, 1));
4715 }
4716 return emit_insn (gen_rtx_USE (VOIDmode, x));
4717}
4718
4719/* Return a sequence of insns to use rvalue X. */
4720
4721rtx
4722gen_use (rtx x)
4723{
4724 rtx seq;
4725
4726 start_sequence ();
4727 emit_use (x);
4728 seq = get_insns ();
4729 end_sequence ();
4730 return seq;
4731}
4732
23b2ce53 4733/* Cause next statement to emit a line note even if the line number
0cea056b 4734 has not changed. */
23b2ce53
RS
4735
4736void
502b8322 4737force_next_line_note (void)
23b2ce53 4738{
6773e15f 4739 last_location = -1;
23b2ce53 4740}
87b47c85
AM
4741
4742/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4743 note of this type already exists, remove it first. */
87b47c85 4744
3d238248 4745rtx
502b8322 4746set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4747{
4748 rtx note = find_reg_note (insn, kind, NULL_RTX);
4749
52488da1
JW
4750 switch (kind)
4751 {
4752 case REG_EQUAL:
4753 case REG_EQUIV:
4754 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4755 has multiple sets (some callers assume single_set
4756 means the insn only has one set, when in fact it
4757 means the insn only has one * useful * set). */
4758 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4759 {
5b0264cb 4760 gcc_assert (!note);
52488da1
JW
4761 return NULL_RTX;
4762 }
4763
4764 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4765 It serves no useful purpose and breaks eliminate_regs. */
4766 if (GET_CODE (datum) == ASM_OPERANDS)
4767 return NULL_RTX;
6fb5fa3c
DB
4768
4769 if (note)
4770 {
4771 XEXP (note, 0) = datum;
4772 df_notes_rescan (insn);
4773 return note;
4774 }
52488da1
JW
4775 break;
4776
4777 default:
6fb5fa3c
DB
4778 if (note)
4779 {
4780 XEXP (note, 0) = datum;
4781 return note;
4782 }
52488da1
JW
4783 break;
4784 }
3d238248 4785
65c5f2a6 4786 add_reg_note (insn, kind, datum);
6fb5fa3c
DB
4787
4788 switch (kind)
3d238248 4789 {
6fb5fa3c
DB
4790 case REG_EQUAL:
4791 case REG_EQUIV:
4792 df_notes_rescan (insn);
4793 break;
4794 default:
4795 break;
3d238248 4796 }
87b47c85 4797
3d238248 4798 return REG_NOTES (insn);
87b47c85 4799}
23b2ce53
RS
4800\f
4801/* Return an indication of which type of insn should have X as a body.
4802 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4803
d78db459 4804static enum rtx_code
502b8322 4805classify_insn (rtx x)
23b2ce53 4806{
4b4bf941 4807 if (LABEL_P (x))
23b2ce53
RS
4808 return CODE_LABEL;
4809 if (GET_CODE (x) == CALL)
4810 return CALL_INSN;
4811 if (GET_CODE (x) == RETURN)
4812 return JUMP_INSN;
4813 if (GET_CODE (x) == SET)
4814 {
4815 if (SET_DEST (x) == pc_rtx)
4816 return JUMP_INSN;
4817 else if (GET_CODE (SET_SRC (x)) == CALL)
4818 return CALL_INSN;
4819 else
4820 return INSN;
4821 }
4822 if (GET_CODE (x) == PARALLEL)
4823 {
b3694847 4824 int j;
23b2ce53
RS
4825 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4826 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4827 return CALL_INSN;
4828 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4829 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4830 return JUMP_INSN;
4831 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4832 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4833 return CALL_INSN;
4834 }
4835 return INSN;
4836}
4837
4838/* Emit the rtl pattern X as an appropriate kind of insn.
4839 If X is a label, it is simply added into the insn chain. */
4840
4841rtx
502b8322 4842emit (rtx x)
23b2ce53
RS
4843{
4844 enum rtx_code code = classify_insn (x);
4845
5b0264cb 4846 switch (code)
23b2ce53 4847 {
5b0264cb
NS
4848 case CODE_LABEL:
4849 return emit_label (x);
4850 case INSN:
4851 return emit_insn (x);
4852 case JUMP_INSN:
4853 {
4854 rtx insn = emit_jump_insn (x);
4855 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4856 return emit_barrier ();
4857 return insn;
4858 }
4859 case CALL_INSN:
4860 return emit_call_insn (x);
4861 default:
4862 gcc_unreachable ();
23b2ce53 4863 }
23b2ce53
RS
4864}
4865\f
e2500fed 4866/* Space for free sequence stack entries. */
1431042e 4867static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 4868
4dfa0342
RH
4869/* Begin emitting insns to a sequence. If this sequence will contain
4870 something that might cause the compiler to pop arguments to function
4871 calls (because those pops have previously been deferred; see
4872 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4873 before calling this function. That will ensure that the deferred
4874 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
4875
4876void
502b8322 4877start_sequence (void)
23b2ce53
RS
4878{
4879 struct sequence_stack *tem;
4880
e2500fed
GK
4881 if (free_sequence_stack != NULL)
4882 {
4883 tem = free_sequence_stack;
4884 free_sequence_stack = tem->next;
4885 }
4886 else
1b4572a8 4887 tem = GGC_NEW (struct sequence_stack);
23b2ce53 4888
49ad7cfa 4889 tem->next = seq_stack;
23b2ce53
RS
4890 tem->first = first_insn;
4891 tem->last = last_insn;
4892
49ad7cfa 4893 seq_stack = tem;
23b2ce53
RS
4894
4895 first_insn = 0;
4896 last_insn = 0;
4897}
4898
5c7a310f
MM
4899/* Set up the insn chain starting with FIRST as the current sequence,
4900 saving the previously current one. See the documentation for
4901 start_sequence for more information about how to use this function. */
23b2ce53
RS
4902
4903void
502b8322 4904push_to_sequence (rtx first)
23b2ce53
RS
4905{
4906 rtx last;
4907
4908 start_sequence ();
4909
4910 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4911
4912 first_insn = first;
4913 last_insn = last;
4914}
4915
bb27eeda
SE
4916/* Like push_to_sequence, but take the last insn as an argument to avoid
4917 looping through the list. */
4918
4919void
4920push_to_sequence2 (rtx first, rtx last)
4921{
4922 start_sequence ();
4923
4924 first_insn = first;
4925 last_insn = last;
4926}
4927
f15ae3a1
TW
4928/* Set up the outer-level insn chain
4929 as the current sequence, saving the previously current one. */
4930
4931void
502b8322 4932push_topmost_sequence (void)
f15ae3a1 4933{
aefdd5ab 4934 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
4935
4936 start_sequence ();
4937
49ad7cfa 4938 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4939 top = stack;
4940
4941 first_insn = top->first;
4942 last_insn = top->last;
4943}
4944
4945/* After emitting to the outer-level insn chain, update the outer-level
4946 insn chain, and restore the previous saved state. */
4947
4948void
502b8322 4949pop_topmost_sequence (void)
f15ae3a1 4950{
aefdd5ab 4951 struct sequence_stack *stack, *top = NULL;
f15ae3a1 4952
49ad7cfa 4953 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4954 top = stack;
4955
4956 top->first = first_insn;
4957 top->last = last_insn;
4958
4959 end_sequence ();
4960}
4961
23b2ce53
RS
4962/* After emitting to a sequence, restore previous saved state.
4963
5c7a310f 4964 To get the contents of the sequence just made, you must call
2f937369 4965 `get_insns' *before* calling here.
5c7a310f
MM
4966
4967 If the compiler might have deferred popping arguments while
4968 generating this sequence, and this sequence will not be immediately
4969 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 4970 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
4971 pops are inserted into this sequence, and not into some random
4972 location in the instruction stream. See INHIBIT_DEFER_POP for more
4973 information about deferred popping of arguments. */
23b2ce53
RS
4974
4975void
502b8322 4976end_sequence (void)
23b2ce53 4977{
49ad7cfa 4978 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
4979
4980 first_insn = tem->first;
4981 last_insn = tem->last;
49ad7cfa 4982 seq_stack = tem->next;
23b2ce53 4983
e2500fed
GK
4984 memset (tem, 0, sizeof (*tem));
4985 tem->next = free_sequence_stack;
4986 free_sequence_stack = tem;
23b2ce53
RS
4987}
4988
4989/* Return 1 if currently emitting into a sequence. */
4990
4991int
502b8322 4992in_sequence_p (void)
23b2ce53 4993{
49ad7cfa 4994 return seq_stack != 0;
23b2ce53 4995}
23b2ce53 4996\f
59ec66dc
MM
4997/* Put the various virtual registers into REGNO_REG_RTX. */
4998
2bbdec73 4999static void
bd60bab2 5000init_virtual_regs (void)
59ec66dc 5001{
bd60bab2
JH
5002 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5003 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5004 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5005 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5006 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
49ad7cfa
BS
5007}
5008
da43a810
BS
5009\f
5010/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5011static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5012static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5013static int copy_insn_n_scratches;
5014
5015/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5016 copied an ASM_OPERANDS.
5017 In that case, it is the original input-operand vector. */
5018static rtvec orig_asm_operands_vector;
5019
5020/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5021 copied an ASM_OPERANDS.
5022 In that case, it is the copied input-operand vector. */
5023static rtvec copy_asm_operands_vector;
5024
5025/* Likewise for the constraints vector. */
5026static rtvec orig_asm_constraints_vector;
5027static rtvec copy_asm_constraints_vector;
5028
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      /* These codes are shared rtl; return the original unchanged.  */
      return orig;
    case CLOBBER:
      /* A clobber of a single hard register is shared as well.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      /* Return the copy made earlier in this copy_insn run, if any,
	 so that all references to one SCRATCH stay identical.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Recursively copy the operands according to the rtx format string.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	/* Reuse the vector copies recorded when the enclosing
	   ASM_OPERANDS was copied, so all its instances share them.  */
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* Record SCRATCH and ASM_OPERANDS copies in the file-static state so
     later references within this same copy_insn run are shared.  */
  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5157
5158/* Create a new copy of an rtx.
5159 This function differs from copy_rtx in that it handles SCRATCHes and
5160 ASM_OPERANDs properly.
5161 INSN doesn't really have to be a full INSN; it could be just the
5162 pattern. */
5163rtx
502b8322 5164copy_insn (rtx insn)
da43a810
BS
5165{
5166 copy_insn_n_scratches = 0;
5167 orig_asm_operands_vector = 0;
5168 orig_asm_constraints_vector = 0;
5169 copy_asm_operands_vector = 0;
5170 copy_asm_constraints_vector = 0;
5171 return copy_insn_1 (insn);
5172}
59ec66dc 5173
23b2ce53
RS
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  /* Start with an empty insn chain and fresh uid/regno counters.  */
  first_insn = NULL;
  last_insn = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  /* NOTE(review): the extra 100 slots beyond the virtual registers
     presumably leave headroom before the tables must grow -- confirm.  */
  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  /* Record the known alignment of the stack-related registers.  */
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5236
a73b091d 5237/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5238
5239static rtx
a73b091d 5240gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5241{
5242 rtx tem;
5243 rtvec v;
5244 int units, i;
5245 enum machine_mode inner;
5246
5247 units = GET_MODE_NUNITS (mode);
5248 inner = GET_MODE_INNER (mode);
5249
15ed7b52
JG
5250 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5251
69ef87e2
AH
5252 v = rtvec_alloc (units);
5253
a73b091d
JW
5254 /* We need to call this function after we set the scalar const_tiny_rtx
5255 entries. */
5256 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5257
5258 for (i = 0; i < units; ++i)
a73b091d 5259 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5260
a06e3c40 5261 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5262 return tem;
5263}
5264
a06e3c40 5265/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
a73b091d 5266 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5267rtx
502b8322 5268gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5269{
a73b091d
JW
5270 enum machine_mode inner = GET_MODE_INNER (mode);
5271 int nunits = GET_MODE_NUNITS (mode);
5272 rtx x;
a06e3c40
R
5273 int i;
5274
a73b091d
JW
5275 /* Check to see if all of the elements have the same value. */
5276 x = RTVEC_ELT (v, nunits - 1);
5277 for (i = nunits - 2; i >= 0; i--)
5278 if (RTVEC_ELT (v, i) != x)
5279 break;
5280
5281 /* If the values are all the same, check to see if we can use one of the
5282 standard constant vectors. */
5283 if (i == -1)
5284 {
5285 if (x == CONST0_RTX (inner))
5286 return CONST0_RTX (mode);
5287 else if (x == CONST1_RTX (inner))
5288 return CONST1_RTX (mode);
5289 }
5290
5291 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5292}
5293
b5deb7b6
SL
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

  /* A target-defined STATIC_CHAIN expression overrides the regno-based
     setup above.  */
#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
5361
23b2ce53
RS
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  /* Pick the narrowest integer modes matching the unit and word sizes.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  /* Likewise pick the float mode matching the C `double' size.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* Share the cached CONST_INT for const_true_rtx when it fits.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* Build the shared REAL_VALUE_TYPE constants 0, 1, 2, -1 and 0.5.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  /* Populate const_tiny_rtx[i][mode] with the constant i (0, 1, 2) for
     every float, decimal-float, integer, and partial-integer mode.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Complex zero is a CONCAT of two scalar zeros.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* Vector zero and one constants for integer and float vector modes.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Fixed-point fractional modes have only a zero constant (1 is not
     representable in a pure fractional mode).  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  /* Accumulator modes can represent 1: shift it into the integral part
     past the fractional bits.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  /* Vector fixed-point modes: fractional vectors get only zero, the
     accumulator vectors get zero and one, mirroring the scalar cases.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Every condition-code mode shares const0_rtx as its zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
a11759a3 5620\f
969d70ca
JH
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  /* Emit the copied pattern with the emitter matching INSN's code.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      /* Carry over the call-specific flag bits.  */
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	/* EXPR_LIST notes hold rtl that must itself be deep-copied;
	   other notes can share their datum.  */
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
e2500fed 5681
1431042e 5682static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d
JH
5683rtx
5684gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5685{
5686 if (hard_reg_clobbers[mode][regno])
5687 return hard_reg_clobbers[mode][regno];
5688 else
5689 return (hard_reg_clobbers[mode][regno] =
5690 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5691}
5692
e2500fed 5693#include "gt-emit-rtl.h"