]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
re PR rtl-optimization/36929 (internal compiler error: Segmentation fault)
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
2d593c86 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
b6f65e3c 4 Free Software Foundation, Inc.
23b2ce53 5
1322177d 6This file is part of GCC.
23b2ce53 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
23b2ce53 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
23b2ce53
RS
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
23b2ce53
RS
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
f822fcf7
KH
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
23b2ce53
RS
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
325217ed 53#include "fixed-value.h"
0dfa1860 54#include "bitmap.h"
a05924f9 55#include "basic-block.h"
87ff9c8e 56#include "ggc.h"
e1772ac0 57#include "debug.h"
d23c55c2 58#include "langhooks.h"
ef330312 59#include "tree-pass.h"
6fb5fa3c 60#include "df.h"
ca695ac9 61
1d445e9e
ILT
62/* Commonly used modes. */
63
0f41302f
MS
64enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
65enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 66enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 67enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 68
bd60bab2
JH
69/* Datastructures maintained for currently processed function in RTL form. */
70
3e029763 71struct rtl_data x_rtl;
bd60bab2
JH
72
73/* Indexed by pseudo register number, gives the rtx for that pseudo.
74 Allocated in parallel with regno_pointer_align.
75 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
76 with length attribute nested in top level structures. */
77
78rtx * regno_reg_rtx;
23b2ce53
RS
79
80/* This is *not* reset after each function. It gives each CODE_LABEL
81 in the entire compilation a unique label number. */
82
044b4de3 83static GTY(()) int label_num = 1;
23b2ce53 84
23b2ce53
RS
85/* Nonzero means do not generate NOTEs for source line numbers. */
86
87static int no_line_numbers;
88
89/* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
5692c7bc
ZW
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
23b2ce53 93
5da077de 94rtx global_rtl[GR_MAX];
23b2ce53 95
6cde4876
JL
96/* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
23b2ce53
RS
102/* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
68d75312
JC
108rtx const_true_rtx;
109
23b2ce53
RS
110REAL_VALUE_TYPE dconst0;
111REAL_VALUE_TYPE dconst1;
112REAL_VALUE_TYPE dconst2;
113REAL_VALUE_TYPE dconstm1;
03f2ea93 114REAL_VALUE_TYPE dconsthalf;
23b2ce53 115
325217ed
CF
116/* Record fixed-point constant 0 and 1. */
117FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
118FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
119
23b2ce53
RS
120/* All references to the following fixed hard registers go through
121 these unique rtl objects. On machines where the frame-pointer and
122 arg-pointer are the same register, they use the same unique object.
123
124 After register allocation, other rtl objects which used to be pseudo-regs
125 may be clobbered to refer to the frame-pointer register.
126 But references that were originally to the frame-pointer can be
127 distinguished from the others because they contain frame_pointer_rtx.
128
ac6f08b0
DE
129 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
130 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 131 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
132 register elimination hard_frame_pointer_rtx should always be used.
133 On machines where the two registers are same (most) then these are the
134 same.
135
23b2ce53
RS
136 In an inline procedure, the stack and frame pointer rtxs may not be
137 used for anything else. */
23b2ce53
RS
138rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
139rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
140rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
141
a4417a86
JW
142/* This is used to implement __builtin_return_address for some machines.
143 See for instance the MIPS port. */
144rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
145
23b2ce53
RS
146/* We make one copy of (const_int C) where C is in
147 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
148 to save space during the compilation and simplify comparisons of
149 integers. */
150
5da077de 151rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 152
c13e8210
MM
153/* A hash table storing CONST_INTs whose absolute value is greater
154 than MAX_SAVED_CONST_INT. */
155
e2500fed
GK
156static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
157 htab_t const_int_htab;
c13e8210 158
173b24b9 159/* A hash table storing memory attribute structures. */
e2500fed
GK
160static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
161 htab_t mem_attrs_htab;
173b24b9 162
a560d4d4
JH
163/* A hash table storing register attribute structures. */
164static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
165 htab_t reg_attrs_htab;
166
5692c7bc 167/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
168static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_double_htab;
5692c7bc 170
091a3ac7
CF
171/* A hash table storing all CONST_FIXEDs. */
172static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
173 htab_t const_fixed_htab;
174
3e029763
JH
175#define first_insn (crtl->emit.x_first_insn)
176#define last_insn (crtl->emit.x_last_insn)
177#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
178#define last_location (crtl->emit.x_last_location)
179#define first_label_num (crtl->emit.x_first_label_num)
23b2ce53 180
502b8322 181static rtx make_call_insn_raw (rtx);
502b8322 182static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
5eb2a9f2 183static void set_used_decls (tree);
502b8322
AJ
184static void mark_label_nuses (rtx);
185static hashval_t const_int_htab_hash (const void *);
186static int const_int_htab_eq (const void *, const void *);
187static hashval_t const_double_htab_hash (const void *);
188static int const_double_htab_eq (const void *, const void *);
189static rtx lookup_const_double (rtx);
091a3ac7
CF
190static hashval_t const_fixed_htab_hash (const void *);
191static int const_fixed_htab_eq (const void *, const void *);
192static rtx lookup_const_fixed (rtx);
502b8322
AJ
193static hashval_t mem_attrs_htab_hash (const void *);
194static int mem_attrs_htab_eq (const void *, const void *);
4862826d 195static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
502b8322
AJ
196 enum machine_mode);
197static hashval_t reg_attrs_htab_hash (const void *);
198static int reg_attrs_htab_eq (const void *, const void *);
199static reg_attrs *get_reg_attrs (tree, int);
200static tree component_ref_for_mem_expr (tree);
a73b091d 201static rtx gen_const_vector (enum machine_mode, int);
32b32b16 202static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 203
6b24c259
JH
204/* Probability of the conditional branch currently proceeded by try_split.
205 Set to -1 otherwise. */
206int split_branch_probability = -1;
ca695ac9 207\f
c13e8210
MM
208/* Returns a hash code for X (which is a really a CONST_INT). */
209
210static hashval_t
502b8322 211const_int_htab_hash (const void *x)
c13e8210 212{
f7d504c2 213 return (hashval_t) INTVAL ((const_rtx) x);
c13e8210
MM
214}
215
cc2902df 216/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
217 CONST_INT) is the same as that given by Y (which is really a
218 HOST_WIDE_INT *). */
219
220static int
502b8322 221const_int_htab_eq (const void *x, const void *y)
c13e8210 222{
f7d504c2 223 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
5692c7bc
ZW
224}
225
226/* Returns a hash code for X (which is really a CONST_DOUBLE). */
227static hashval_t
502b8322 228const_double_htab_hash (const void *x)
5692c7bc 229{
f7d504c2 230 const_rtx const value = (const_rtx) x;
46b33600 231 hashval_t h;
5692c7bc 232
46b33600
RH
233 if (GET_MODE (value) == VOIDmode)
234 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
235 else
fe352c29 236 {
15c812e3 237 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
238 /* MODE is used in the comparison, so it should be in the hash. */
239 h ^= GET_MODE (value);
240 }
5692c7bc
ZW
241 return h;
242}
243
cc2902df 244/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
245 is the same as that represented by Y (really a ...) */
246static int
502b8322 247const_double_htab_eq (const void *x, const void *y)
5692c7bc 248{
f7d504c2 249 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
5692c7bc
ZW
250
251 if (GET_MODE (a) != GET_MODE (b))
252 return 0;
8580f7a0
RH
253 if (GET_MODE (a) == VOIDmode)
254 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
255 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
256 else
257 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
258 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
259}
260
091a3ac7
CF
261/* Returns a hash code for X (which is really a CONST_FIXED). */
262
263static hashval_t
264const_fixed_htab_hash (const void *x)
265{
3101faab 266 const_rtx const value = (const_rtx) x;
091a3ac7
CF
267 hashval_t h;
268
269 h = fixed_hash (CONST_FIXED_VALUE (value));
270 /* MODE is used in the comparison, so it should be in the hash. */
271 h ^= GET_MODE (value);
272 return h;
273}
274
275/* Returns nonzero if the value represented by X (really a ...)
276 is the same as that represented by Y (really a ...). */
277
278static int
279const_fixed_htab_eq (const void *x, const void *y)
280{
3101faab 281 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
091a3ac7
CF
282
283 if (GET_MODE (a) != GET_MODE (b))
284 return 0;
285 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
286}
287
173b24b9
RK
288/* Returns a hash code for X (which is a really a mem_attrs *). */
289
290static hashval_t
502b8322 291mem_attrs_htab_hash (const void *x)
173b24b9 292{
f7d504c2 293 const mem_attrs *const p = (const mem_attrs *) x;
173b24b9
RK
294
295 return (p->alias ^ (p->align * 1000)
296 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
297 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
78b76d08 298 ^ (size_t) iterative_hash_expr (p->expr, 0));
173b24b9
RK
299}
300
cc2902df 301/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
302 mem_attrs *) is the same as that given by Y (which is also really a
303 mem_attrs *). */
c13e8210
MM
304
305static int
502b8322 306mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 307{
741ac903
KG
308 const mem_attrs *const p = (const mem_attrs *) x;
309 const mem_attrs *const q = (const mem_attrs *) y;
173b24b9 310
78b76d08
SB
311 return (p->alias == q->alias && p->offset == q->offset
312 && p->size == q->size && p->align == q->align
313 && (p->expr == q->expr
314 || (p->expr != NULL_TREE && q->expr != NULL_TREE
315 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
316}
317
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.

   Returns the canonical (shared) mem_attrs for the given ALIAS set,
   MEM_EXPR EXPR, OFFSET, SIZE and ALIGN, or zero when every field has
   its default value.  The returned pointer is GC-allocated and may be
   compared with == for attribute equality.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  /* Build a stack copy to use as the hash-table lookup key.  */
  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      /* Not in the table yet: intern a GC-allocated copy.  */
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
354
a560d4d4
JH
355/* Returns a hash code for X (which is a really a reg_attrs *). */
356
357static hashval_t
502b8322 358reg_attrs_htab_hash (const void *x)
a560d4d4 359{
741ac903 360 const reg_attrs *const p = (const reg_attrs *) x;
a560d4d4
JH
361
362 return ((p->offset * 1000) ^ (long) p->decl);
363}
364
6356f892 365/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
366 reg_attrs *) is the same as that given by Y (which is also really a
367 reg_attrs *). */
368
369static int
502b8322 370reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4 371{
741ac903
KG
372 const reg_attrs *const p = (const reg_attrs *) x;
373 const reg_attrs *const q = (const reg_attrs *) y;
a560d4d4
JH
374
375 return (p->decl == q->decl && p->offset == q->offset);
376}
377/* Allocate a new reg_attrs structure and insert it into the hash table if
378 one identical to it is not already in the table. We are doing this for
379 MEM of mode MODE. */
380
381static reg_attrs *
502b8322 382get_reg_attrs (tree decl, int offset)
a560d4d4
JH
383{
384 reg_attrs attrs;
385 void **slot;
386
387 /* If everything is the default, we can just return zero. */
388 if (decl == 0 && offset == 0)
389 return 0;
390
391 attrs.decl = decl;
392 attrs.offset = offset;
393
394 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
395 if (*slot == 0)
396 {
397 *slot = ggc_alloc (sizeof (reg_attrs));
398 memcpy (*slot, &attrs, sizeof (reg_attrs));
399 }
400
1b4572a8 401 return (reg_attrs *) *slot;
a560d4d4
JH
402}
403
6fb5fa3c
DB
404
405#if !HAVE_blockage
406/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
407 across this insn. */
408
409rtx
410gen_blockage (void)
411{
412 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
413 MEM_VOLATILE_P (x) = true;
414 return x;
415}
416#endif
417
418
08394eef
BS
419/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
420 don't attempt to share with the various global pieces of rtl (such as
421 frame_pointer_rtx). */
422
423rtx
502b8322 424gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
425{
426 rtx x = gen_rtx_raw_REG (mode, regno);
427 ORIGINAL_REGNO (x) = regno;
428 return x;
429}
430
c5c76735
JL
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

/* Return the canonical CONST_INT rtx for value ARG.  MODE is ignored:
   CONST_INTs are created with VOIDmode and shared, so pointer equality
   implies value equality.  Small values come from the pre-allocated
   const_int_rtx array; larger ones are interned in const_int_htab.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  /* The common small values are pre-allocated once per compilation.  */
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
456
2496c7bd 457rtx
502b8322 458gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
459{
460 return GEN_INT (trunc_int_for_mode (c, mode));
461}
462
5692c7bc
ZW
463/* CONST_DOUBLEs might be created from pairs of integers, or from
464 REAL_VALUE_TYPEs. Also, their length is known only at run time,
465 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
466
467/* Determine whether REAL, a CONST_DOUBLE, already exists in the
468 hash table. If so, return its counterpart; otherwise add it
469 to the hash table and return it. */
470static rtx
502b8322 471lookup_const_double (rtx real)
5692c7bc
ZW
472{
473 void **slot = htab_find_slot (const_double_htab, real, INSERT);
474 if (*slot == 0)
475 *slot = real;
476
477 return (rtx) *slot;
478}
29105cea 479
5692c7bc
ZW
480/* Return a CONST_DOUBLE rtx for a floating-point value specified by
481 VALUE in mode MODE. */
0133b7d9 482rtx
502b8322 483const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 484{
5692c7bc
ZW
485 rtx real = rtx_alloc (CONST_DOUBLE);
486 PUT_MODE (real, mode);
487
9e254451 488 real->u.rv = value;
5692c7bc
ZW
489
490 return lookup_const_double (real);
491}
492
091a3ac7
CF
493/* Determine whether FIXED, a CONST_FIXED, already exists in the
494 hash table. If so, return its counterpart; otherwise add it
495 to the hash table and return it. */
496
497static rtx
498lookup_const_fixed (rtx fixed)
499{
500 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
501 if (*slot == 0)
502 *slot = fixed;
503
504 return (rtx) *slot;
505}
506
507/* Return a CONST_FIXED rtx for a fixed-point value specified by
508 VALUE in mode MODE. */
509
510rtx
511const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
512{
513 rtx fixed = rtx_alloc (CONST_FIXED);
514 PUT_MODE (fixed, mode);
515
516 fixed->u.fv = value;
517
518 return lookup_const_fixed (fixed);
519}
520
5692c7bc
ZW
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  /* Zero any further words of the CONST_DOUBLE beyond the two we set
     above; how many exist depends on CONST_DOUBLE_FORMAT.  */
  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  /* Share the result via the CONST_DOUBLE hash table.  */
  return lookup_const_double (value);
}
572
/* Return a REG rtx for register number REGNO in mode MODE.  For the
   well-known pointer registers in Pmode (frame, hard frame, arg,
   return-address, PIC offset table, stack), return the shared
   pre-allocated rtx instead of a fresh one.  */

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  /* Fall back to a fresh, unshared REG.  */
  return gen_raw_REG (mode, regno);
}
637
41472af8 638rtx
502b8322 639gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
640{
641 rtx rt = gen_rtx_raw_MEM (mode, addr);
642
643 /* This field is not cleared by the mere allocation of the rtx, so
644 we clear it here. */
173b24b9 645 MEM_ATTRS (rt) = 0;
41472af8
MM
646
647 return rt;
648}
ddef6bc7 649
542a8afa
RH
650/* Generate a memory referring to non-trapping constant memory. */
651
652rtx
653gen_const_mem (enum machine_mode mode, rtx addr)
654{
655 rtx mem = gen_rtx_MEM (mode, addr);
656 MEM_READONLY_P (mem) = 1;
657 MEM_NOTRAP_P (mem) = 1;
658 return mem;
659}
660
bf877a76
R
661/* Generate a MEM referring to fixed portions of the frame, e.g., register
662 save areas. */
663
664rtx
665gen_frame_mem (enum machine_mode mode, rtx addr)
666{
667 rtx mem = gen_rtx_MEM (mode, addr);
668 MEM_NOTRAP_P (mem) = 1;
669 set_mem_alias_set (mem, get_frame_alias_set ());
670 return mem;
671}
672
673/* Generate a MEM referring to a temporary use of the stack, not part
674 of the fixed stack frame. For example, something which is pushed
675 by a target splitter. */
676rtx
677gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
678{
679 rtx mem = gen_rtx_MEM (mode, addr);
680 MEM_NOTRAP_P (mem) = 1;
e3b5732b 681 if (!cfun->calls_alloca)
bf877a76
R
682 set_mem_alias_set (mem, get_frame_alias_set ());
683 return mem;
684}
685
beb72684
RH
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  REG, when
   non-null, is the inner object; hard registers get extra target
   checks.  OFFSET is in bytes.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      /* Component extraction from complex/vector is always allowed;
	 otherwise defer to the target's mode-change restriction.  */
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
773
774rtx
775gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
776{
777 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 778 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
779}
780
173b24b9
RK
781/* Generate a SUBREG representing the least-significant part of REG if MODE
782 is smaller than mode of REG, otherwise paradoxical SUBREG. */
783
ddef6bc7 784rtx
502b8322 785gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
786{
787 enum machine_mode inmode;
ddef6bc7
JJ
788
789 inmode = GET_MODE (reg);
790 if (inmode == VOIDmode)
791 inmode = mode;
e0e08ac2
JH
792 return gen_rtx_SUBREG (mode, reg,
793 subreg_lowpart_offset (mode, inmode));
ddef6bc7 794}
c5c76735 795\f
23b2ce53
RS
796/* gen_rtvec (n, [rt1, ..., rtn])
797**
798** This routine creates an rtvec and stores within it the
799** pointers to rtx's which are its arguments.
800*/
801
802/*VARARGS1*/
803rtvec
e34d07f2 804gen_rtvec (int n, ...)
23b2ce53 805{
6268b922 806 int i, save_n;
23b2ce53 807 rtx *vector;
e34d07f2 808 va_list p;
23b2ce53 809
e34d07f2 810 va_start (p, n);
23b2ce53
RS
811
812 if (n == 0)
813 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
814
1b4572a8 815 vector = XALLOCAVEC (rtx, n);
4f90e4a0 816
23b2ce53
RS
817 for (i = 0; i < n; i++)
818 vector[i] = va_arg (p, rtx);
6268b922
KG
819
820 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
821 save_n = n;
e34d07f2 822 va_end (p);
23b2ce53 823
6268b922 824 return gen_rtvec_v (save_n, vector);
23b2ce53
RS
825}
826
827rtvec
502b8322 828gen_rtvec_v (int n, rtx *argp)
23b2ce53 829{
b3694847
SS
830 int i;
831 rtvec rt_val;
23b2ce53
RS
832
833 if (n == 0)
834 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
835
836 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
837
838 for (i = 0; i < n; i++)
8f985ec4 839 rt_val->elem[i] = *argp++;
23b2ce53
RS
840
841 return rt_val;
842}
843\f
38ae7651
RS
844/* Return the number of bytes between the start of an OUTER_MODE
845 in-memory value and the start of an INNER_MODE in-memory value,
846 given that the former is a lowpart of the latter. It may be a
847 paradoxical lowpart, in which case the offset will be negative
848 on big-endian targets. */
849
850int
851byte_lowpart_offset (enum machine_mode outer_mode,
852 enum machine_mode inner_mode)
853{
854 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
855 return subreg_lowpart_offset (outer_mode, inner_mode);
856 else
857 return -subreg_lowpart_offset (inner_mode, outer_mode);
858}
859\f
23b2ce53
RS
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.
   Must only be called while pseudos may still be created
   (asserted via can_create_pseudo_p).  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;

  gcc_assert (can_create_pseudo_p ());

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      /* Double both tables and zero the newly added halves.  */
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      /* regno_reg_rtx is GC-scanned, so resize it via the GC
	 allocator.  */
      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  /* Allocate the pseudo and record it under the next register
     number.  */
  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
911
38ae7651
RS
912/* Update NEW with the same attributes as REG, but with OFFSET added
913 to the REG_OFFSET. */
a560d4d4 914
e53a16e7 915static void
60564289 916update_reg_offset (rtx new_rtx, rtx reg, int offset)
a560d4d4 917{
60564289 918 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
502b8322 919 REG_OFFSET (reg) + offset);
e53a16e7
ILT
920}
921
38ae7651
RS
922/* Generate a register with same attributes as REG, but with OFFSET
923 added to the REG_OFFSET. */
e53a16e7
ILT
924
925rtx
926gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
927 int offset)
928{
60564289 929 rtx new_rtx = gen_rtx_REG (mode, regno);
e53a16e7 930
60564289
KG
931 update_reg_offset (new_rtx, reg, offset);
932 return new_rtx;
e53a16e7
ILT
933}
934
935/* Generate a new pseudo-register with the same attributes as REG, but
38ae7651 936 with OFFSET added to the REG_OFFSET. */
e53a16e7
ILT
937
938rtx
939gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
940{
60564289 941 rtx new_rtx = gen_reg_rtx (mode);
e53a16e7 942
60564289
KG
943 update_reg_offset (new_rtx, reg, offset);
944 return new_rtx;
a560d4d4
JH
945}
946
38ae7651
RS
947/* Adjust REG in-place so that it has mode MODE. It is assumed that the
948 new register is a (possibly paradoxical) lowpart of the old one. */
a560d4d4
JH
949
950void
38ae7651 951adjust_reg_mode (rtx reg, enum machine_mode mode)
a560d4d4 952{
38ae7651
RS
953 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
954 PUT_MODE (reg, mode);
955}
956
957/* Copy REG's attributes from X, if X has any attributes. If REG and X
958 have different modes, REG is a (possibly paradoxical) lowpart of X. */
959
960void
961set_reg_attrs_from_value (rtx reg, rtx x)
962{
963 int offset;
964
923ba36f
JJ
965 /* Hard registers can be reused for multiple purposes within the same
966 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
967 on them is wrong. */
968 if (HARD_REGISTER_P (reg))
969 return;
970
38ae7651 971 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
46b71b03
PB
972 if (MEM_P (x))
973 {
974 if (MEM_OFFSET (x) && GET_CODE (MEM_OFFSET (x)) == CONST_INT)
975 REG_ATTRS (reg)
976 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
977 if (MEM_POINTER (x))
978 mark_reg_pointer (reg, MEM_ALIGN (x));
979 }
980 else if (REG_P (x))
981 {
982 if (REG_ATTRS (x))
983 update_reg_offset (reg, x, offset);
984 if (REG_POINTER (x))
985 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
986 }
987}
988
989/* Generate a REG rtx for a new pseudo register, copying the mode
990 and attributes from X. */
991
992rtx
993gen_reg_rtx_and_attrs (rtx x)
994{
995 rtx reg = gen_reg_rtx (GET_MODE (x));
996 set_reg_attrs_from_value (reg, x);
997 return reg;
a560d4d4
JH
998}
999
9d18e06b
JZ
1000/* Set the register attributes for registers contained in PARM_RTX.
1001 Use needed values from memory attributes of MEM. */
1002
1003void
502b8322 1004set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
9d18e06b 1005{
f8cfc6aa 1006 if (REG_P (parm_rtx))
38ae7651 1007 set_reg_attrs_from_value (parm_rtx, mem);
9d18e06b
JZ
1008 else if (GET_CODE (parm_rtx) == PARALLEL)
1009 {
1010 /* Check for a NULL entry in the first slot, used to indicate that the
1011 parameter goes both on the stack and in registers. */
1012 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1013 for (; i < XVECLEN (parm_rtx, 0); i++)
1014 {
1015 rtx x = XVECEXP (parm_rtx, 0, i);
f8cfc6aa 1016 if (REG_P (XEXP (x, 0)))
9d18e06b
JZ
1017 REG_ATTRS (XEXP (x, 0))
1018 = get_reg_attrs (MEM_EXPR (mem),
1019 INTVAL (XEXP (x, 1)));
1020 }
1021 }
1022}
1023
38ae7651
RS
1024/* Set the REG_ATTRS for registers in value X, given that X represents
1025 decl T. */
a560d4d4 1026
38ae7651
RS
1027static void
1028set_reg_attrs_for_decl_rtl (tree t, rtx x)
1029{
1030 if (GET_CODE (x) == SUBREG)
fbe6ec81 1031 {
38ae7651
RS
1032 gcc_assert (subreg_lowpart_p (x));
1033 x = SUBREG_REG (x);
fbe6ec81 1034 }
f8cfc6aa 1035 if (REG_P (x))
38ae7651
RS
1036 REG_ATTRS (x)
1037 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
726612d2 1038 DECL_MODE (t)));
a560d4d4
JH
1039 if (GET_CODE (x) == CONCAT)
1040 {
1041 if (REG_P (XEXP (x, 0)))
1042 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1043 if (REG_P (XEXP (x, 1)))
1044 REG_ATTRS (XEXP (x, 1))
1045 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1046 }
1047 if (GET_CODE (x) == PARALLEL)
1048 {
d4afac5b
JZ
1049 int i, start;
1050
1051 /* Check for a NULL entry, used to indicate that the parameter goes
1052 both on the stack and in registers. */
1053 if (XEXP (XVECEXP (x, 0, 0), 0))
1054 start = 0;
1055 else
1056 start = 1;
1057
1058 for (i = start; i < XVECLEN (x, 0); i++)
a560d4d4
JH
1059 {
1060 rtx y = XVECEXP (x, 0, i);
1061 if (REG_P (XEXP (y, 0)))
1062 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1063 }
1064 }
1065}
1066
38ae7651
RS
1067/* Assign the RTX X to declaration T. */
1068
1069void
1070set_decl_rtl (tree t, rtx x)
1071{
1072 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1073 if (x)
1074 set_reg_attrs_for_decl_rtl (t, x);
1075}
1076
5141868d
RS
1077/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1078 if the ABI requires the parameter to be passed by reference. */
38ae7651
RS
1079
1080void
5141868d 1081set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
38ae7651
RS
1082{
1083 DECL_INCOMING_RTL (t) = x;
5141868d 1084 if (x && !by_reference_p)
38ae7651
RS
1085 set_reg_attrs_for_decl_rtl (t, x);
1086}
1087
754fdcca
RK
1088/* Identify REG (which may be a CONCAT) as a user register. */
1089
1090void
502b8322 1091mark_user_reg (rtx reg)
754fdcca
RK
1092{
1093 if (GET_CODE (reg) == CONCAT)
1094 {
1095 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1096 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1097 }
754fdcca 1098 else
5b0264cb
NS
1099 {
1100 gcc_assert (REG_P (reg));
1101 REG_USERVAR_P (reg) = 1;
1102 }
754fdcca
RK
1103}
1104
86fe05e0
RK
1105/* Identify REG as a probable pointer register and show its alignment
1106 as ALIGN, if nonzero. */
23b2ce53
RS
1107
1108void
502b8322 1109mark_reg_pointer (rtx reg, int align)
23b2ce53 1110{
3502dc9c 1111 if (! REG_POINTER (reg))
00995e78 1112 {
3502dc9c 1113 REG_POINTER (reg) = 1;
86fe05e0 1114
00995e78
RE
1115 if (align)
1116 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1117 }
1118 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 1119 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 1120 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
1121}
1122
1123/* Return 1 plus largest pseudo reg number used in the current function. */
1124
1125int
502b8322 1126max_reg_num (void)
23b2ce53
RS
1127{
1128 return reg_rtx_no;
1129}
1130
1131/* Return 1 + the largest label number used so far in the current function. */
1132
1133int
502b8322 1134max_label_num (void)
23b2ce53 1135{
23b2ce53
RS
1136 return label_num;
1137}
1138
1139/* Return first label number used in this function (if any were used). */
1140
1141int
502b8322 1142get_first_label_num (void)
23b2ce53
RS
1143{
1144 return first_label_num;
1145}
6de9cd9a
DN
1146
1147/* If the rtx for label was created during the expansion of a nested
1148 function, then first_label_num won't include this label number.
fa10beec 1149 Fix this now so that array indices work later. */
6de9cd9a
DN
1150
1151void
1152maybe_set_first_label_num (rtx x)
1153{
1154 if (CODE_LABEL_NUMBER (x) < first_label_num)
1155 first_label_num = CODE_LABEL_NUMBER (x);
1156}
23b2ce53
RS
1157\f
1158/* Return a value representing some low-order bits of X, where the number
1159 of low-order bits is given by MODE. Note that no conversion is done
750c9258 1160 between floating-point and fixed-point values, rather, the bit
23b2ce53
RS
1161 representation is returned.
1162
1163 This function handles the cases in common between gen_lowpart, below,
1164 and two variants in cse.c and combine.c. These are the cases that can
1165 be safely handled at all points in the compilation.
1166
1167 If this is not a case we can handle, return 0. */
1168
1169rtx
502b8322 1170gen_lowpart_common (enum machine_mode mode, rtx x)
23b2ce53 1171{
ddef6bc7 1172 int msize = GET_MODE_SIZE (mode);
550d1387 1173 int xsize;
ddef6bc7 1174 int offset = 0;
550d1387
GK
1175 enum machine_mode innermode;
1176
1177 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1178 so we have to make one up. Yuk. */
1179 innermode = GET_MODE (x);
db487452
R
1180 if (GET_CODE (x) == CONST_INT
1181 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
550d1387
GK
1182 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1183 else if (innermode == VOIDmode)
1184 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1185
1186 xsize = GET_MODE_SIZE (innermode);
1187
5b0264cb 1188 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
23b2ce53 1189
550d1387 1190 if (innermode == mode)
23b2ce53
RS
1191 return x;
1192
1193 /* MODE must occupy no more words than the mode of X. */
550d1387
GK
1194 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1195 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
23b2ce53
RS
1196 return 0;
1197
53501a19 1198 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
3d8bf70f 1199 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
53501a19
BS
1200 return 0;
1201
550d1387 1202 offset = subreg_lowpart_offset (mode, innermode);
23b2ce53
RS
1203
1204 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
83e9c679
RK
1205 && (GET_MODE_CLASS (mode) == MODE_INT
1206 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
23b2ce53
RS
1207 {
1208 /* If we are getting the low-order part of something that has been
1209 sign- or zero-extended, we can either just use the object being
1210 extended or make a narrower extension. If we want an even smaller
1211 piece than the size of the object being extended, call ourselves
1212 recursively.
1213
1214 This case is used mostly by combine and cse. */
1215
1216 if (GET_MODE (XEXP (x, 0)) == mode)
1217 return XEXP (x, 0);
550d1387 1218 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
23b2ce53 1219 return gen_lowpart_common (mode, XEXP (x, 0));
550d1387 1220 else if (msize < xsize)
3b80f6ca 1221 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
23b2ce53 1222 }
f8cfc6aa 1223 else if (GET_CODE (x) == SUBREG || REG_P (x)
550d1387
GK
1224 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1225 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1226 return simplify_gen_subreg (mode, x, innermode, offset);
8aada4ad 1227
23b2ce53
RS
1228 /* Otherwise, we can't do this. */
1229 return 0;
1230}
1231\f
ccba022b 1232rtx
502b8322 1233gen_highpart (enum machine_mode mode, rtx x)
ccba022b 1234{
ddef6bc7 1235 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1236 rtx result;
ddef6bc7 1237
ccba022b
RS
1238 /* This case loses if X is a subreg. To catch bugs early,
1239 complain if an invalid MODE is used even in other cases. */
5b0264cb
NS
1240 gcc_assert (msize <= UNITS_PER_WORD
1241 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
ddef6bc7 1242
e0e08ac2
JH
1243 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1244 subreg_highpart_offset (mode, GET_MODE (x)));
5b0264cb
NS
1245 gcc_assert (result);
1246
09482e0d
JW
1247 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1248 the target if we have a MEM. gen_highpart must return a valid operand,
1249 emitting code if necessary to do so. */
5b0264cb
NS
1250 if (MEM_P (result))
1251 {
1252 result = validize_mem (result);
1253 gcc_assert (result);
1254 }
1255
e0e08ac2
JH
1256 return result;
1257}
5222e470 1258
26d249eb 1259/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1260 be VOIDmode constant. */
1261rtx
502b8322 1262gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1263{
1264 if (GET_MODE (exp) != VOIDmode)
1265 {
5b0264cb 1266 gcc_assert (GET_MODE (exp) == innermode);
5222e470
JH
1267 return gen_highpart (outermode, exp);
1268 }
1269 return simplify_gen_subreg (outermode, exp, innermode,
1270 subreg_highpart_offset (outermode, innermode));
1271}
68252e27 1272
38ae7651 1273/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
8698cce3 1274
e0e08ac2 1275unsigned int
502b8322 1276subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1277{
1278 unsigned int offset = 0;
1279 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1280
e0e08ac2 1281 if (difference > 0)
ccba022b 1282 {
e0e08ac2
JH
1283 if (WORDS_BIG_ENDIAN)
1284 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1285 if (BYTES_BIG_ENDIAN)
1286 offset += difference % UNITS_PER_WORD;
ccba022b 1287 }
ddef6bc7 1288
e0e08ac2 1289 return offset;
ccba022b 1290}
eea50aa0 1291
e0e08ac2
JH
1292/* Return offset in bytes to get OUTERMODE high part
1293 of the value in mode INNERMODE stored in memory in target format. */
1294unsigned int
502b8322 1295subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1296{
1297 unsigned int offset = 0;
1298 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1299
5b0264cb 1300 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
e0e08ac2 1301
eea50aa0
JH
1302 if (difference > 0)
1303 {
e0e08ac2 1304 if (! WORDS_BIG_ENDIAN)
eea50aa0 1305 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1306 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1307 offset += difference % UNITS_PER_WORD;
1308 }
1309
e0e08ac2 1310 return offset;
eea50aa0 1311}
ccba022b 1312
23b2ce53
RS
1313/* Return 1 iff X, assumed to be a SUBREG,
1314 refers to the least significant part of its containing reg.
1315 If X is not a SUBREG, always return 1 (it is its own low part!). */
1316
1317int
fa233e34 1318subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1319{
1320 if (GET_CODE (x) != SUBREG)
1321 return 1;
a3a03040
RK
1322 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1323 return 0;
23b2ce53 1324
e0e08ac2
JH
1325 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1326 == SUBREG_BYTE (x));
23b2ce53
RS
1327}
1328\f
ddef6bc7
JJ
1329/* Return subword OFFSET of operand OP.
1330 The word number, OFFSET, is interpreted as the word number starting
1331 at the low-order address. OFFSET 0 is the low-order word if not
1332 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1333
1334 If we cannot extract the required word, we return zero. Otherwise,
1335 an rtx corresponding to the requested word will be returned.
1336
1337 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1338 reload has completed, a valid address will always be returned. After
1339 reload, if a valid address cannot be returned, we return zero.
1340
1341 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1342 it is the responsibility of the caller.
1343
1344 MODE is the mode of OP in case it is a CONST_INT.
1345
1346 ??? This is still rather broken for some cases. The problem for the
1347 moment is that all callers of this thing provide no 'goal mode' to
1348 tell us to work with. This exists because all callers were written
0631e0bf
JH
1349 in a word based SUBREG world.
1350 Now use of this function can be deprecated by simplify_subreg in most
1351 cases.
1352 */
ddef6bc7
JJ
1353
1354rtx
502b8322 1355operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
ddef6bc7
JJ
1356{
1357 if (mode == VOIDmode)
1358 mode = GET_MODE (op);
1359
5b0264cb 1360 gcc_assert (mode != VOIDmode);
ddef6bc7 1361
30f7a378 1362 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1363 if (mode != BLKmode
1364 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1365 return 0;
1366
30f7a378 1367 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1368 if (mode != BLKmode
1369 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1370 return const0_rtx;
1371
ddef6bc7 1372 /* Form a new MEM at the requested address. */
3c0cb5de 1373 if (MEM_P (op))
ddef6bc7 1374 {
60564289 1375 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1376
f1ec5147 1377 if (! validate_address)
60564289 1378 return new_rtx;
f1ec5147
RK
1379
1380 else if (reload_completed)
ddef6bc7 1381 {
60564289 1382 if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
f1ec5147 1383 return 0;
ddef6bc7 1384 }
f1ec5147 1385 else
60564289 1386 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
ddef6bc7
JJ
1387 }
1388
0631e0bf
JH
1389 /* Rest can be handled by simplify_subreg. */
1390 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1391}
1392
535a42b1
NS
1393/* Similar to `operand_subword', but never return 0. If we can't
1394 extract the required subword, put OP into a register and try again.
1395 The second attempt must succeed. We always validate the address in
1396 this case.
23b2ce53
RS
1397
1398 MODE is the mode of OP, in case it is CONST_INT. */
1399
1400rtx
502b8322 1401operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1402{
ddef6bc7 1403 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1404
1405 if (result)
1406 return result;
1407
1408 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1409 {
1410 /* If this is a register which can not be accessed by words, copy it
1411 to a pseudo register. */
f8cfc6aa 1412 if (REG_P (op))
77e6b0eb
JC
1413 op = copy_to_reg (op);
1414 else
1415 op = force_reg (mode, op);
1416 }
23b2ce53 1417
ddef6bc7 1418 result = operand_subword (op, offset, 1, mode);
5b0264cb 1419 gcc_assert (result);
23b2ce53
RS
1420
1421 return result;
1422}
1423\f
998d7deb
RH
1424/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1425 or (2) a component ref of something variable. Represent the later with
1426 a NULL expression. */
1427
1428static tree
502b8322 1429component_ref_for_mem_expr (tree ref)
998d7deb
RH
1430{
1431 tree inner = TREE_OPERAND (ref, 0);
1432
1433 if (TREE_CODE (inner) == COMPONENT_REF)
1434 inner = component_ref_for_mem_expr (inner);
c56e3582
RK
1435 else
1436 {
c56e3582 1437 /* Now remove any conversions: they don't change what the underlying
6fce44af 1438 object is. Likewise for SAVE_EXPR. */
1043771b 1439 while (CONVERT_EXPR_P (inner)
c56e3582 1440 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
6fce44af
RK
1441 || TREE_CODE (inner) == SAVE_EXPR)
1442 inner = TREE_OPERAND (inner, 0);
c56e3582
RK
1443
1444 if (! DECL_P (inner))
1445 inner = NULL_TREE;
1446 }
998d7deb
RH
1447
1448 if (inner == TREE_OPERAND (ref, 0))
1449 return ref;
1450 else
3244e67d
RS
1451 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1452 TREE_OPERAND (ref, 1), NULL_TREE);
998d7deb 1453}
173b24b9 1454
2b3493c8
AK
1455/* Returns 1 if both MEM_EXPR can be considered equal
1456 and 0 otherwise. */
1457
1458int
4f588890 1459mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1460{
1461 if (expr1 == expr2)
1462 return 1;
1463
1464 if (! expr1 || ! expr2)
1465 return 0;
1466
1467 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1468 return 0;
1469
1470 if (TREE_CODE (expr1) == COMPONENT_REF)
1471 return
1472 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1473 TREE_OPERAND (expr2, 0))
1474 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1475 TREE_OPERAND (expr2, 1));
1476
1b096a0a 1477 if (INDIRECT_REF_P (expr1))
2b3493c8
AK
1478 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1479 TREE_OPERAND (expr2, 0));
2b3493c8 1480
5b0264cb 1481 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
2b3493c8 1482 have been resolved here. */
5b0264cb
NS
1483 gcc_assert (DECL_P (expr1));
1484
1485 /* Decls with different pointers can't be equal. */
1486 return 0;
2b3493c8
AK
1487}
1488
6926c713 1489/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1490 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1491 if we are making a new object of this type. BITPOS is nonzero if
1492 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1493
1494void
502b8322
AJ
1495set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1496 HOST_WIDE_INT bitpos)
173b24b9 1497{
4862826d 1498 alias_set_type alias = MEM_ALIAS_SET (ref);
998d7deb 1499 tree expr = MEM_EXPR (ref);
8ac61af7
RK
1500 rtx offset = MEM_OFFSET (ref);
1501 rtx size = MEM_SIZE (ref);
1502 unsigned int align = MEM_ALIGN (ref);
6f1087be 1503 HOST_WIDE_INT apply_bitpos = 0;
173b24b9
RK
1504 tree type;
1505
1506 /* It can happen that type_for_mode was given a mode for which there
1507 is no language-level type. In which case it returns NULL, which
1508 we can see here. */
1509 if (t == NULL_TREE)
1510 return;
1511
1512 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1513 if (type == error_mark_node)
1514 return;
173b24b9 1515
173b24b9
RK
1516 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1517 wrong answer, as it assumes that DECL_RTL already has the right alias
1518 info. Callers should not set DECL_RTL until after the call to
1519 set_mem_attributes. */
5b0264cb 1520 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1521
738cc472 1522 /* Get the alias set from the expression or type (perhaps using a
8ac61af7
RK
1523 front-end routine) and use it. */
1524 alias = get_alias_set (t);
173b24b9 1525
a5e9c810 1526 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
07cb6e8c
JM
1527 MEM_IN_STRUCT_P (ref)
1528 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
f8ad8d7c 1529 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1530
8ac61af7
RK
1531 /* If we are making an object of this type, or if this is a DECL, we know
1532 that it is a scalar if the type is not an aggregate. */
07cb6e8c
JM
1533 if ((objectp || DECL_P (t))
1534 && ! AGGREGATE_TYPE_P (type)
1535 && TREE_CODE (type) != COMPLEX_TYPE)
173b24b9
RK
1536 MEM_SCALAR_P (ref) = 1;
1537
c3d32120
RK
1538 /* We can set the alignment from the type if we are making an object,
1539 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
7ccf35ed
DN
1540 if (objectp || TREE_CODE (t) == INDIRECT_REF
1541 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1542 || TYPE_ALIGN_OK (type))
c3d32120 1543 align = MAX (align, TYPE_ALIGN (type));
7ccf35ed
DN
1544 else
1545 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1546 {
1547 if (integer_zerop (TREE_OPERAND (t, 1)))
1548 /* We don't know anything about the alignment. */
1549 align = BITS_PER_UNIT;
1550 else
1551 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1552 }
40c0668b 1553
738cc472
RK
1554 /* If the size is known, we can set that. */
1555 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
8ac61af7 1556 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
738cc472 1557
80965c18
RK
1558 /* If T is not a type, we may be able to deduce some more information about
1559 the expression. */
1560 if (! TYPE_P (t))
8ac61af7 1561 {
8476af98 1562 tree base;
389fdba0 1563
8ac61af7
RK
1564 if (TREE_THIS_VOLATILE (t))
1565 MEM_VOLATILE_P (ref) = 1;
173b24b9 1566
c56e3582
RK
1567 /* Now remove any conversions: they don't change what the underlying
1568 object is. Likewise for SAVE_EXPR. */
1043771b 1569 while (CONVERT_EXPR_P (t)
c56e3582
RK
1570 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1571 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1572 t = TREE_OPERAND (t, 0);
1573
8476af98
RH
1574 /* We may look through structure-like accesses for the purposes of
1575 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1576 base = t;
1577 while (TREE_CODE (base) == COMPONENT_REF
1578 || TREE_CODE (base) == REALPART_EXPR
1579 || TREE_CODE (base) == IMAGPART_EXPR
1580 || TREE_CODE (base) == BIT_FIELD_REF)
1581 base = TREE_OPERAND (base, 0);
1582
1583 if (DECL_P (base))
1584 {
1585 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1586 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1587 else
1588 MEM_NOTRAP_P (ref) = 1;
1589 }
1590 else
1591 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1592
1593 base = get_base_address (base);
1594 if (base && DECL_P (base)
1595 && TREE_READONLY (base)
1596 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1597 {
1598 tree base_type = TREE_TYPE (base);
1599 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1600 || DECL_ARTIFICIAL (base));
1601 MEM_READONLY_P (ref) = 1;
1602 }
1603
2039d7aa
RH
1604 /* If this expression uses it's parent's alias set, mark it such
1605 that we won't change it. */
1606 if (component_uses_parent_alias_set (t))
10b76d73
RK
1607 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1608
8ac61af7
RK
1609 /* If this is a decl, set the attributes of the MEM from it. */
1610 if (DECL_P (t))
1611 {
998d7deb
RH
1612 expr = t;
1613 offset = const0_rtx;
6f1087be 1614 apply_bitpos = bitpos;
8ac61af7
RK
1615 size = (DECL_SIZE_UNIT (t)
1616 && host_integerp (DECL_SIZE_UNIT (t), 1)
1617 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
68252e27 1618 align = DECL_ALIGN (t);
8ac61af7
RK
1619 }
1620
40c0668b 1621 /* If this is a constant, we know the alignment. */
6615c446 1622 else if (CONSTANT_CLASS_P (t))
9ddfb1a7
RK
1623 {
1624 align = TYPE_ALIGN (type);
1625#ifdef CONSTANT_ALIGNMENT
1626 align = CONSTANT_ALIGNMENT (t, align);
1627#endif
1628 }
998d7deb
RH
1629
1630 /* If this is a field reference and not a bit-field, record it. */
fa10beec 1631 /* ??? There is some information that can be gleaned from bit-fields,
998d7deb
RH
1632 such as the word offset in the structure that might be modified.
1633 But skip it for now. */
1634 else if (TREE_CODE (t) == COMPONENT_REF
1635 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1636 {
1637 expr = component_ref_for_mem_expr (t);
1638 offset = const0_rtx;
6f1087be 1639 apply_bitpos = bitpos;
998d7deb
RH
1640 /* ??? Any reason the field size would be different than
1641 the size we got from the type? */
1642 }
1643
1644 /* If this is an array reference, look for an outer field reference. */
1645 else if (TREE_CODE (t) == ARRAY_REF)
1646 {
1647 tree off_tree = size_zero_node;
1b1838b6
JW
1648 /* We can't modify t, because we use it at the end of the
1649 function. */
1650 tree t2 = t;
998d7deb
RH
1651
1652 do
1653 {
1b1838b6 1654 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1655 tree low_bound = array_ref_low_bound (t2);
1656 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1657
1658 /* We assume all arrays have sizes that are a multiple of a byte.
1659 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1660 index, then convert to sizetype and multiply by the size of
1661 the array element. */
1662 if (! integer_zerop (low_bound))
4845b383
KH
1663 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1664 index, low_bound);
2567406a 1665
44de5aeb 1666 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1667 size_binop (MULT_EXPR,
1668 fold_convert (sizetype,
1669 index),
44de5aeb
RK
1670 unit_size),
1671 off_tree);
1b1838b6 1672 t2 = TREE_OPERAND (t2, 0);
998d7deb 1673 }
1b1838b6 1674 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1675
1b1838b6 1676 if (DECL_P (t2))
c67a1cf6 1677 {
1b1838b6 1678 expr = t2;
40cb04f1 1679 offset = NULL;
c67a1cf6 1680 if (host_integerp (off_tree, 1))
40cb04f1
RH
1681 {
1682 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1683 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1b1838b6 1684 align = DECL_ALIGN (t2);
fc555370 1685 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
40cb04f1
RH
1686 align = aoff;
1687 offset = GEN_INT (ioff);
6f1087be 1688 apply_bitpos = bitpos;
40cb04f1 1689 }
c67a1cf6 1690 }
1b1838b6 1691 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1692 {
1b1838b6 1693 expr = component_ref_for_mem_expr (t2);
998d7deb 1694 if (host_integerp (off_tree, 1))
6f1087be
RH
1695 {
1696 offset = GEN_INT (tree_low_cst (off_tree, 1));
1697 apply_bitpos = bitpos;
1698 }
998d7deb
RH
1699 /* ??? Any reason the field size would be different than
1700 the size we got from the type? */
1701 }
c67a1cf6 1702 else if (flag_argument_noalias > 1
1b096a0a 1703 && (INDIRECT_REF_P (t2))
1b1838b6 1704 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
c67a1cf6 1705 {
1b1838b6 1706 expr = t2;
c67a1cf6
RH
1707 offset = NULL;
1708 }
1709 }
1710
1711 /* If this is a Fortran indirect argument reference, record the
1712 parameter decl. */
1713 else if (flag_argument_noalias > 1
1b096a0a 1714 && (INDIRECT_REF_P (t))
c67a1cf6
RH
1715 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1716 {
1717 expr = t;
1718 offset = NULL;
998d7deb 1719 }
8ac61af7
RK
1720 }
1721
15c812e3 1722 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1723 bit position offset. Similarly, increase the size of the accessed
1724 object to contain the negative offset. */
6f1087be 1725 if (apply_bitpos)
8c317c5f
RH
1726 {
1727 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1728 if (size)
1729 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1730 }
6f1087be 1731
7ccf35ed
DN
1732 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1733 {
fa10beec 1734 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
7ccf35ed
DN
1735 we're overlapping. */
1736 offset = NULL;
1737 expr = NULL;
1738 }
1739
8ac61af7 1740 /* Now set the attributes we computed above. */
10b76d73 1741 MEM_ATTRS (ref)
998d7deb 1742 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
8ac61af7
RK
1743
1744 /* If this is already known to be a scalar or aggregate, we are done. */
1745 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
738cc472
RK
1746 return;
1747
8ac61af7
RK
1748 /* If it is a reference into an aggregate, this is part of an aggregate.
1749 Otherwise we don't know. */
173b24b9
RK
1750 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1751 || TREE_CODE (t) == ARRAY_RANGE_REF
1752 || TREE_CODE (t) == BIT_FIELD_REF)
1753 MEM_IN_STRUCT_P (ref) = 1;
1754}
1755
6f1087be 1756void
502b8322 1757set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1758{
1759 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1760}
1761
895a8136 1762/* Set MEM to the decl that REG refers to. */
a560d4d4
JH
1763
1764void
502b8322 1765set_mem_attrs_from_reg (rtx mem, rtx reg)
a560d4d4
JH
1766{
1767 MEM_ATTRS (mem)
1768 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1769 GEN_INT (REG_OFFSET (reg)),
1770 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1771}
1772
173b24b9
RK
1773/* Set the alias set of MEM to SET. */
1774
1775void
4862826d 1776set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1777{
68252e27 1778#ifdef ENABLE_CHECKING
173b24b9 1779 /* If the new and old alias sets don't conflict, something is wrong. */
5b0264cb 1780 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
173b24b9
RK
1781#endif
1782
998d7deb 1783 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1784 MEM_SIZE (mem), MEM_ALIGN (mem),
1785 GET_MODE (mem));
173b24b9 1786}
738cc472 1787
d022d93e 1788/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1789
1790void
502b8322 1791set_mem_align (rtx mem, unsigned int align)
738cc472 1792{
998d7deb 1793 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1794 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1795 GET_MODE (mem));
738cc472 1796}
1285011e 1797
998d7deb 1798/* Set the expr for MEM to EXPR. */
1285011e
RK
1799
1800void
502b8322 1801set_mem_expr (rtx mem, tree expr)
1285011e
RK
1802{
1803 MEM_ATTRS (mem)
998d7deb 1804 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1805 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1806}
998d7deb
RH
1807
1808/* Set the offset of MEM to OFFSET. */
1809
1810void
502b8322 1811set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1812{
1813 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1814 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1815 GET_MODE (mem));
35aff10b
AM
1816}
1817
1818/* Set the size of MEM to SIZE. */
1819
1820void
502b8322 1821set_mem_size (rtx mem, rtx size)
35aff10b
AM
1822{
1823 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1824 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1825 GET_MODE (mem));
998d7deb 1826}
173b24b9 1827\f
738cc472
RK
1828/* Return a memory reference like MEMREF, but with its mode changed to MODE
1829 and its address changed to ADDR. (VOIDmode means don't change the mode.
1830 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1831 returned memory location is required to be valid. The memory
1832 attributes are not changed. */
23b2ce53 1833
738cc472 1834static rtx
502b8322 1835change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 1836{
60564289 1837 rtx new_rtx;
23b2ce53 1838
5b0264cb 1839 gcc_assert (MEM_P (memref));
23b2ce53
RS
1840 if (mode == VOIDmode)
1841 mode = GET_MODE (memref);
1842 if (addr == 0)
1843 addr = XEXP (memref, 0);
a74ff877
JH
1844 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1845 && (!validate || memory_address_p (mode, addr)))
1846 return memref;
23b2ce53 1847
f1ec5147 1848 if (validate)
23b2ce53 1849 {
f1ec5147 1850 if (reload_in_progress || reload_completed)
5b0264cb 1851 gcc_assert (memory_address_p (mode, addr));
f1ec5147
RK
1852 else
1853 addr = memory_address (mode, addr);
23b2ce53 1854 }
750c9258 1855
9b04c6a8
RK
1856 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1857 return memref;
1858
60564289
KG
1859 new_rtx = gen_rtx_MEM (mode, addr);
1860 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1861 return new_rtx;
23b2ce53 1862}
792760b9 1863
738cc472
RK
1864/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1865 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
1866
1867rtx
502b8322 1868change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 1869{
60564289
KG
1870 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1871 enum machine_mode mmode = GET_MODE (new_rtx);
4e44c1ef
JJ
1872 unsigned int align;
1873
1874 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1875 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
c2f7bcc3 1876
fdb1c7b3 1877 /* If there are no changes, just return the original memory reference. */
60564289 1878 if (new_rtx == memref)
4e44c1ef
JJ
1879 {
1880 if (MEM_ATTRS (memref) == 0
1881 || (MEM_EXPR (memref) == NULL
1882 && MEM_OFFSET (memref) == NULL
1883 && MEM_SIZE (memref) == size
1884 && MEM_ALIGN (memref) == align))
60564289 1885 return new_rtx;
4e44c1ef 1886
60564289
KG
1887 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1888 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 1889 }
fdb1c7b3 1890
60564289 1891 MEM_ATTRS (new_rtx)
4e44c1ef 1892 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
823e3574 1893
60564289 1894 return new_rtx;
f4ef873c 1895}
792760b9 1896
738cc472
RK
1897/* Return a memory reference like MEMREF, but with its mode changed
1898 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
1899 nonzero, the memory address is forced to be valid.
1900 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1901 and caller is responsible for adjusting MEMREF base register. */
f1ec5147
RK
1902
1903rtx
502b8322
AJ
1904adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1905 int validate, int adjust)
f1ec5147 1906{
823e3574 1907 rtx addr = XEXP (memref, 0);
60564289 1908 rtx new_rtx;
738cc472 1909 rtx memoffset = MEM_OFFSET (memref);
10b76d73 1910 rtx size = 0;
738cc472 1911 unsigned int memalign = MEM_ALIGN (memref);
823e3574 1912
fdb1c7b3
JH
1913 /* If there are no changes, just return the original memory reference. */
1914 if (mode == GET_MODE (memref) && !offset
1915 && (!validate || memory_address_p (mode, addr)))
1916 return memref;
1917
d14419e4 1918 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 1919 This may happen even if offset is nonzero -- consider
d14419e4
RH
1920 (plus (plus reg reg) const_int) -- so do this always. */
1921 addr = copy_rtx (addr);
1922
4a78c787
RH
1923 if (adjust)
1924 {
1925 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1926 object, we can merge it into the LO_SUM. */
1927 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1928 && offset >= 0
1929 && (unsigned HOST_WIDE_INT) offset
1930 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1931 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1932 plus_constant (XEXP (addr, 1), offset));
1933 else
1934 addr = plus_constant (addr, offset);
1935 }
823e3574 1936
60564289 1937 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472
RK
1938
1939 /* Compute the new values of the memory attributes due to this adjustment.
1940 We add the offsets and update the alignment. */
1941 if (memoffset)
1942 memoffset = GEN_INT (offset + INTVAL (memoffset));
1943
03bf2c23
RK
1944 /* Compute the new alignment by taking the MIN of the alignment and the
1945 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1946 if zero. */
1947 if (offset != 0)
3bf1e984
RK
1948 memalign
1949 = MIN (memalign,
1950 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
738cc472 1951
10b76d73 1952 /* We can compute the size in a number of ways. */
60564289
KG
1953 if (GET_MODE (new_rtx) != BLKmode)
1954 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
10b76d73
RK
1955 else if (MEM_SIZE (memref))
1956 size = plus_constant (MEM_SIZE (memref), -offset);
1957
60564289
KG
1958 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1959 memoffset, size, memalign, GET_MODE (new_rtx));
738cc472
RK
1960
1961 /* At some point, we should validate that this offset is within the object,
1962 if all the appropriate values are known. */
60564289 1963 return new_rtx;
f1ec5147
RK
1964}
1965
630036c6
JJ
1966/* Return a memory reference like MEMREF, but with its mode changed
1967 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 1968 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
1969 nonzero, the memory address is forced to be valid. */
1970
1971rtx
502b8322
AJ
1972adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1973 HOST_WIDE_INT offset, int validate)
630036c6
JJ
1974{
1975 memref = change_address_1 (memref, VOIDmode, addr, validate);
1976 return adjust_address_1 (memref, mode, offset, validate, 0);
1977}
1978
8ac61af7
RK
1979/* Return a memory reference like MEMREF, but whose address is changed by
1980 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1981 known to be in OFFSET (possibly 1). */
0d4903b8
RK
1982
1983rtx
502b8322 1984offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 1985{
60564289 1986 rtx new_rtx, addr = XEXP (memref, 0);
e3c8ea67 1987
60564289 1988 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
e3c8ea67 1989
68252e27 1990 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 1991 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
1992
1993 However, if we did go and rearrange things, we can wind up not
1994 being able to recognize the magic around pic_offset_table_rtx.
1995 This stuff is fragile, and is yet another example of why it is
1996 bad to expose PIC machinery too early. */
60564289 1997 if (! memory_address_p (GET_MODE (memref), new_rtx)
e3c8ea67
RH
1998 && GET_CODE (addr) == PLUS
1999 && XEXP (addr, 0) == pic_offset_table_rtx)
2000 {
2001 addr = force_reg (GET_MODE (addr), addr);
60564289 2002 new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
e3c8ea67
RH
2003 }
2004
60564289
KG
2005 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2006 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2007
fdb1c7b3 2008 /* If there are no changes, just return the original memory reference. */
60564289
KG
2009 if (new_rtx == memref)
2010 return new_rtx;
fdb1c7b3 2011
0d4903b8
RK
2012 /* Update the alignment to reflect the offset. Reset the offset, which
2013 we don't know. */
60564289 2014 MEM_ATTRS (new_rtx)
2cc2d4bb 2015 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
9ceca302 2016 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
60564289
KG
2017 GET_MODE (new_rtx));
2018 return new_rtx;
0d4903b8 2019}
68252e27 2020
792760b9
RK
2021/* Return a memory reference like MEMREF, but with its address changed to
2022 ADDR. The caller is asserting that the actual piece of memory pointed
2023 to is the same, just the form of the address is being changed, such as
2024 by putting something into a register. */
2025
2026rtx
502b8322 2027replace_equiv_address (rtx memref, rtx addr)
792760b9 2028{
738cc472
RK
2029 /* change_address_1 copies the memory attribute structure without change
2030 and that's exactly what we want here. */
40c0668b 2031 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2032 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2033}
738cc472 2034
f1ec5147
RK
2035/* Likewise, but the reference is not required to be valid. */
2036
2037rtx
502b8322 2038replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2039{
f1ec5147
RK
2040 return change_address_1 (memref, VOIDmode, addr, 0);
2041}
e7dfe4bb
RH
2042
2043/* Return a memory reference like MEMREF, but with its mode widened to
2044 MODE and offset by OFFSET. This would be used by targets that e.g.
2045 cannot issue QImode memory operations and have to use SImode memory
2046 operations plus masking logic. */
2047
2048rtx
502b8322 2049widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2050{
60564289
KG
2051 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2052 tree expr = MEM_EXPR (new_rtx);
2053 rtx memoffset = MEM_OFFSET (new_rtx);
e7dfe4bb
RH
2054 unsigned int size = GET_MODE_SIZE (mode);
2055
fdb1c7b3 2056 /* If there are no changes, just return the original memory reference. */
60564289
KG
2057 if (new_rtx == memref)
2058 return new_rtx;
fdb1c7b3 2059
e7dfe4bb
RH
2060 /* If we don't know what offset we were at within the expression, then
2061 we can't know if we've overstepped the bounds. */
fa1591cb 2062 if (! memoffset)
e7dfe4bb
RH
2063 expr = NULL_TREE;
2064
2065 while (expr)
2066 {
2067 if (TREE_CODE (expr) == COMPONENT_REF)
2068 {
2069 tree field = TREE_OPERAND (expr, 1);
44de5aeb 2070 tree offset = component_ref_field_offset (expr);
e7dfe4bb
RH
2071
2072 if (! DECL_SIZE_UNIT (field))
2073 {
2074 expr = NULL_TREE;
2075 break;
2076 }
2077
2078 /* Is the field at least as large as the access? If so, ok,
2079 otherwise strip back to the containing structure. */
03667700
RK
2080 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2081 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
e7dfe4bb
RH
2082 && INTVAL (memoffset) >= 0)
2083 break;
2084
44de5aeb 2085 if (! host_integerp (offset, 1))
e7dfe4bb
RH
2086 {
2087 expr = NULL_TREE;
2088 break;
2089 }
2090
2091 expr = TREE_OPERAND (expr, 0);
44de5aeb
RK
2092 memoffset
2093 = (GEN_INT (INTVAL (memoffset)
2094 + tree_low_cst (offset, 1)
2095 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2096 / BITS_PER_UNIT)));
e7dfe4bb
RH
2097 }
2098 /* Similarly for the decl. */
2099 else if (DECL_P (expr)
2100 && DECL_SIZE_UNIT (expr)
45f79783 2101 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
e7dfe4bb
RH
2102 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2103 && (! memoffset || INTVAL (memoffset) >= 0))
2104 break;
2105 else
2106 {
2107 /* The widened memory access overflows the expression, which means
2108 that it could alias another expression. Zap it. */
2109 expr = NULL_TREE;
2110 break;
2111 }
2112 }
2113
2114 if (! expr)
2115 memoffset = NULL_RTX;
2116
2117 /* The widened memory may alias other stuff, so zap the alias set. */
2118 /* ??? Maybe use get_alias_set on any remaining expression. */
2119
60564289
KG
2120 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2121 MEM_ALIGN (new_rtx), mode);
e7dfe4bb 2122
60564289 2123 return new_rtx;
e7dfe4bb 2124}
23b2ce53
RS
2125\f
2126/* Return a newly created CODE_LABEL rtx with a unique label number. */
2127
2128rtx
502b8322 2129gen_label_rtx (void)
23b2ce53 2130{
0dc36574 2131 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2132 NULL, label_num++, NULL);
23b2ce53
RS
2133}
2134\f
2135/* For procedure integration. */
2136
23b2ce53 2137/* Install new pointers to the first and last insns in the chain.
86fe05e0 2138 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2139 Used for an inline-procedure after copying the insn chain. */
2140
2141void
502b8322 2142set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2143{
86fe05e0
RK
2144 rtx insn;
2145
23b2ce53
RS
2146 first_insn = first;
2147 last_insn = last;
86fe05e0
RK
2148 cur_insn_uid = 0;
2149
2150 for (insn = first; insn; insn = NEXT_INSN (insn))
2151 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2152
2153 cur_insn_uid++;
23b2ce53 2154}
23b2ce53 2155\f
750c9258 2156/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2157 structure. This routine should only be called once. */
23b2ce53 2158
fd743bc1 2159static void
b4aaa77b 2160unshare_all_rtl_1 (rtx insn)
23b2ce53 2161{
d1b81779 2162 /* Unshare just about everything else. */
2c07f13b 2163 unshare_all_rtl_in_chain (insn);
750c9258 2164
23b2ce53
RS
2165 /* Make sure the addresses of stack slots found outside the insn chain
2166 (such as, in DECL_RTL of a variable) are not shared
2167 with the insn chain.
2168
2169 This special care is necessary when the stack slot MEM does not
2170 actually appear in the insn chain. If it does appear, its address
2171 is unshared from all else at that point. */
242b0ce6 2172 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2173}
2174
750c9258 2175/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2176 structure, again. This is a fairly expensive thing to do so it
2177 should be done sparingly. */
2178
2179void
502b8322 2180unshare_all_rtl_again (rtx insn)
d1b81779
GK
2181{
2182 rtx p;
624c87aa
RE
2183 tree decl;
2184
d1b81779 2185 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2186 if (INSN_P (p))
d1b81779
GK
2187 {
2188 reset_used_flags (PATTERN (p));
2189 reset_used_flags (REG_NOTES (p));
d1b81779 2190 }
624c87aa 2191
2d4aecb3 2192 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2193 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2194
624c87aa
RE
2195 /* Make sure that virtual parameters are not shared. */
2196 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
5eb2a9f2 2197 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2198
2199 reset_used_flags (stack_slot_list);
2200
b4aaa77b 2201 unshare_all_rtl_1 (insn);
fd743bc1
PB
2202}
2203
c2924966 2204unsigned int
fd743bc1
PB
2205unshare_all_rtl (void)
2206{
b4aaa77b 2207 unshare_all_rtl_1 (get_insns ());
c2924966 2208 return 0;
d1b81779
GK
2209}
2210
8ddbbcae 2211struct rtl_opt_pass pass_unshare_all_rtl =
ef330312 2212{
8ddbbcae
JH
2213 {
2214 RTL_PASS,
defb77dc 2215 "unshare", /* name */
ef330312
PB
2216 NULL, /* gate */
2217 unshare_all_rtl, /* execute */
2218 NULL, /* sub */
2219 NULL, /* next */
2220 0, /* static_pass_number */
2221 0, /* tv_id */
2222 0, /* properties_required */
2223 0, /* properties_provided */
2224 0, /* properties_destroyed */
2225 0, /* todo_flags_start */
8ddbbcae
JH
2226 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2227 }
ef330312
PB
2228};
2229
2230
2c07f13b
JH
2231/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2232 Recursively does the same for subexpressions. */
2233
2234static void
2235verify_rtx_sharing (rtx orig, rtx insn)
2236{
2237 rtx x = orig;
2238 int i;
2239 enum rtx_code code;
2240 const char *format_ptr;
2241
2242 if (x == 0)
2243 return;
2244
2245 code = GET_CODE (x);
2246
2247 /* These types may be freely shared. */
2248
2249 switch (code)
2250 {
2251 case REG:
2c07f13b
JH
2252 case CONST_INT:
2253 case CONST_DOUBLE:
091a3ac7 2254 case CONST_FIXED:
2c07f13b
JH
2255 case CONST_VECTOR:
2256 case SYMBOL_REF:
2257 case LABEL_REF:
2258 case CODE_LABEL:
2259 case PC:
2260 case CC0:
2261 case SCRATCH:
2c07f13b 2262 return;
3e89ed8d
JH
2263 /* SCRATCH must be shared because they represent distinct values. */
2264 case CLOBBER:
2265 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2266 return;
2267 break;
2c07f13b
JH
2268
2269 case CONST:
6fb5fa3c 2270 if (shared_const_p (orig))
2c07f13b
JH
2271 return;
2272 break;
2273
2274 case MEM:
2275 /* A MEM is allowed to be shared if its address is constant. */
2276 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2277 || reload_completed || reload_in_progress)
2278 return;
2279
2280 break;
2281
2282 default:
2283 break;
2284 }
2285
2286 /* This rtx may not be shared. If it has already been seen,
2287 replace it with a copy of itself. */
1a2caa7a 2288#ifdef ENABLE_CHECKING
2c07f13b
JH
2289 if (RTX_FLAG (x, used))
2290 {
ab532386 2291 error ("invalid rtl sharing found in the insn");
2c07f13b 2292 debug_rtx (insn);
ab532386 2293 error ("shared rtx");
2c07f13b 2294 debug_rtx (x);
ab532386 2295 internal_error ("internal consistency failure");
2c07f13b 2296 }
1a2caa7a
NS
2297#endif
2298 gcc_assert (!RTX_FLAG (x, used));
2299
2c07f13b
JH
2300 RTX_FLAG (x, used) = 1;
2301
6614fd40 2302 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2303
2304 format_ptr = GET_RTX_FORMAT (code);
2305
2306 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2307 {
2308 switch (*format_ptr++)
2309 {
2310 case 'e':
2311 verify_rtx_sharing (XEXP (x, i), insn);
2312 break;
2313
2314 case 'E':
2315 if (XVEC (x, i) != NULL)
2316 {
2317 int j;
2318 int len = XVECLEN (x, i);
2319
2320 for (j = 0; j < len; j++)
2321 {
1a2caa7a
NS
2322 /* We allow sharing of ASM_OPERANDS inside single
2323 instruction. */
2c07f13b 2324 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2325 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2326 == ASM_OPERANDS))
2c07f13b
JH
2327 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2328 else
2329 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2330 }
2331 }
2332 break;
2333 }
2334 }
2335 return;
2336}
2337
ba228239 2338/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2339 sharing in between the subexpressions. */
2340
2341void
2342verify_rtl_sharing (void)
2343{
2344 rtx p;
2345
2346 for (p = get_insns (); p; p = NEXT_INSN (p))
2347 if (INSN_P (p))
2348 {
2349 reset_used_flags (PATTERN (p));
2350 reset_used_flags (REG_NOTES (p));
2954a813
KK
2351 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2352 {
2353 int i;
2354 rtx q, sequence = PATTERN (p);
2355
2356 for (i = 0; i < XVECLEN (sequence, 0); i++)
2357 {
2358 q = XVECEXP (sequence, 0, i);
2359 gcc_assert (INSN_P (q));
2360 reset_used_flags (PATTERN (q));
2361 reset_used_flags (REG_NOTES (q));
2954a813
KK
2362 }
2363 }
2c07f13b
JH
2364 }
2365
2366 for (p = get_insns (); p; p = NEXT_INSN (p))
2367 if (INSN_P (p))
2368 {
2369 verify_rtx_sharing (PATTERN (p), p);
2370 verify_rtx_sharing (REG_NOTES (p), p);
2c07f13b
JH
2371 }
2372}
2373
d1b81779
GK
2374/* Go through all the RTL insn bodies and copy any invalid shared structure.
2375 Assumes the mark bits are cleared at entry. */
2376
2c07f13b
JH
2377void
2378unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2379{
2380 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2381 if (INSN_P (insn))
d1b81779
GK
2382 {
2383 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2384 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
d1b81779
GK
2385 }
2386}
2387
2d4aecb3 2388/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2389 shared. We never replace the DECL_RTLs themselves with a copy,
2390 but expressions mentioned into a DECL_RTL cannot be shared with
2391 expressions in the instruction stream.
2392
2393 Note that reload may convert pseudo registers into memories in-place.
2394 Pseudo registers are always shared, but MEMs never are. Thus if we
2395 reset the used flags on MEMs in the instruction stream, we must set
2396 them again on MEMs that appear in DECL_RTLs. */
2397
2d4aecb3 2398static void
5eb2a9f2 2399set_used_decls (tree blk)
2d4aecb3
AO
2400{
2401 tree t;
2402
2403 /* Mark decls. */
2404 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c 2405 if (DECL_RTL_SET_P (t))
5eb2a9f2 2406 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2407
2408 /* Now process sub-blocks. */
87caf699 2409 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2410 set_used_decls (t);
2d4aecb3
AO
2411}
2412
23b2ce53 2413/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2414 Recursively does the same for subexpressions. Uses
2415 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2416
2417rtx
502b8322 2418copy_rtx_if_shared (rtx orig)
23b2ce53 2419{
32b32b16
AP
2420 copy_rtx_if_shared_1 (&orig);
2421 return orig;
2422}
2423
ff954f39
AP
2424/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2425 use. Recursively does the same for subexpressions. */
2426
32b32b16
AP
2427static void
2428copy_rtx_if_shared_1 (rtx *orig1)
2429{
2430 rtx x;
b3694847
SS
2431 int i;
2432 enum rtx_code code;
32b32b16 2433 rtx *last_ptr;
b3694847 2434 const char *format_ptr;
23b2ce53 2435 int copied = 0;
32b32b16
AP
2436 int length;
2437
2438 /* Repeat is used to turn tail-recursion into iteration. */
2439repeat:
2440 x = *orig1;
23b2ce53
RS
2441
2442 if (x == 0)
32b32b16 2443 return;
23b2ce53
RS
2444
2445 code = GET_CODE (x);
2446
2447 /* These types may be freely shared. */
2448
2449 switch (code)
2450 {
2451 case REG:
23b2ce53
RS
2452 case CONST_INT:
2453 case CONST_DOUBLE:
091a3ac7 2454 case CONST_FIXED:
69ef87e2 2455 case CONST_VECTOR:
23b2ce53 2456 case SYMBOL_REF:
2c07f13b 2457 case LABEL_REF:
23b2ce53
RS
2458 case CODE_LABEL:
2459 case PC:
2460 case CC0:
2461 case SCRATCH:
0f41302f 2462 /* SCRATCH must be shared because they represent distinct values. */
32b32b16 2463 return;
3e89ed8d
JH
2464 case CLOBBER:
2465 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2466 return;
2467 break;
23b2ce53 2468
b851ea09 2469 case CONST:
6fb5fa3c 2470 if (shared_const_p (x))
32b32b16 2471 return;
b851ea09
RK
2472 break;
2473
23b2ce53
RS
2474 case INSN:
2475 case JUMP_INSN:
2476 case CALL_INSN:
2477 case NOTE:
23b2ce53
RS
2478 case BARRIER:
2479 /* The chain of insns is not being copied. */
32b32b16 2480 return;
23b2ce53 2481
e9a25f70
JL
2482 default:
2483 break;
23b2ce53
RS
2484 }
2485
2486 /* This rtx may not be shared. If it has already been seen,
2487 replace it with a copy of itself. */
2488
2adc7f12 2489 if (RTX_FLAG (x, used))
23b2ce53 2490 {
aacd3885 2491 x = shallow_copy_rtx (x);
23b2ce53
RS
2492 copied = 1;
2493 }
2adc7f12 2494 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2495
2496 /* Now scan the subexpressions recursively.
2497 We can store any replaced subexpressions directly into X
2498 since we know X is not shared! Any vectors in X
2499 must be copied if X was copied. */
2500
2501 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2502 length = GET_RTX_LENGTH (code);
2503 last_ptr = NULL;
2504
2505 for (i = 0; i < length; i++)
23b2ce53
RS
2506 {
2507 switch (*format_ptr++)
2508 {
2509 case 'e':
32b32b16
AP
2510 if (last_ptr)
2511 copy_rtx_if_shared_1 (last_ptr);
2512 last_ptr = &XEXP (x, i);
23b2ce53
RS
2513 break;
2514
2515 case 'E':
2516 if (XVEC (x, i) != NULL)
2517 {
b3694847 2518 int j;
f0722107 2519 int len = XVECLEN (x, i);
32b32b16 2520
6614fd40
KH
2521 /* Copy the vector iff I copied the rtx and the length
2522 is nonzero. */
f0722107 2523 if (copied && len > 0)
8f985ec4 2524 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
32b32b16 2525
5d3cc252 2526 /* Call recursively on all inside the vector. */
f0722107 2527 for (j = 0; j < len; j++)
32b32b16
AP
2528 {
2529 if (last_ptr)
2530 copy_rtx_if_shared_1 (last_ptr);
2531 last_ptr = &XVECEXP (x, i, j);
2532 }
23b2ce53
RS
2533 }
2534 break;
2535 }
2536 }
32b32b16
AP
2537 *orig1 = x;
2538 if (last_ptr)
2539 {
2540 orig1 = last_ptr;
2541 goto repeat;
2542 }
2543 return;
23b2ce53
RS
2544}
2545
2546/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2547 to look for shared sub-parts. */
2548
2549void
502b8322 2550reset_used_flags (rtx x)
23b2ce53 2551{
b3694847
SS
2552 int i, j;
2553 enum rtx_code code;
2554 const char *format_ptr;
32b32b16 2555 int length;
23b2ce53 2556
32b32b16
AP
2557 /* Repeat is used to turn tail-recursion into iteration. */
2558repeat:
23b2ce53
RS
2559 if (x == 0)
2560 return;
2561
2562 code = GET_CODE (x);
2563
9faa82d8 2564 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2565 for them. */
2566
2567 switch (code)
2568 {
2569 case REG:
23b2ce53
RS
2570 case CONST_INT:
2571 case CONST_DOUBLE:
091a3ac7 2572 case CONST_FIXED:
69ef87e2 2573 case CONST_VECTOR:
23b2ce53
RS
2574 case SYMBOL_REF:
2575 case CODE_LABEL:
2576 case PC:
2577 case CC0:
2578 return;
2579
2580 case INSN:
2581 case JUMP_INSN:
2582 case CALL_INSN:
2583 case NOTE:
2584 case LABEL_REF:
2585 case BARRIER:
2586 /* The chain of insns is not being copied. */
2587 return;
750c9258 2588
e9a25f70
JL
2589 default:
2590 break;
23b2ce53
RS
2591 }
2592
2adc7f12 2593 RTX_FLAG (x, used) = 0;
23b2ce53
RS
2594
2595 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2596 length = GET_RTX_LENGTH (code);
2597
2598 for (i = 0; i < length; i++)
23b2ce53
RS
2599 {
2600 switch (*format_ptr++)
2601 {
2602 case 'e':
32b32b16
AP
2603 if (i == length-1)
2604 {
2605 x = XEXP (x, i);
2606 goto repeat;
2607 }
23b2ce53
RS
2608 reset_used_flags (XEXP (x, i));
2609 break;
2610
2611 case 'E':
2612 for (j = 0; j < XVECLEN (x, i); j++)
2613 reset_used_flags (XVECEXP (x, i, j));
2614 break;
2615 }
2616 }
2617}
2c07f13b
JH
2618
2619/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2620 to look for shared sub-parts. */
2621
2622void
2623set_used_flags (rtx x)
2624{
2625 int i, j;
2626 enum rtx_code code;
2627 const char *format_ptr;
2628
2629 if (x == 0)
2630 return;
2631
2632 code = GET_CODE (x);
2633
2634 /* These types may be freely shared so we needn't do any resetting
2635 for them. */
2636
2637 switch (code)
2638 {
2639 case REG:
2c07f13b
JH
2640 case CONST_INT:
2641 case CONST_DOUBLE:
091a3ac7 2642 case CONST_FIXED:
2c07f13b
JH
2643 case CONST_VECTOR:
2644 case SYMBOL_REF:
2645 case CODE_LABEL:
2646 case PC:
2647 case CC0:
2648 return;
2649
2650 case INSN:
2651 case JUMP_INSN:
2652 case CALL_INSN:
2653 case NOTE:
2654 case LABEL_REF:
2655 case BARRIER:
2656 /* The chain of insns is not being copied. */
2657 return;
2658
2659 default:
2660 break;
2661 }
2662
2663 RTX_FLAG (x, used) = 1;
2664
2665 format_ptr = GET_RTX_FORMAT (code);
2666 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2667 {
2668 switch (*format_ptr++)
2669 {
2670 case 'e':
2671 set_used_flags (XEXP (x, i));
2672 break;
2673
2674 case 'E':
2675 for (j = 0; j < XVECLEN (x, i); j++)
2676 set_used_flags (XVECEXP (x, i, j));
2677 break;
2678 }
2679 }
2680}
23b2ce53
RS
2681\f
2682/* Copy X if necessary so that it won't be altered by changes in OTHER.
2683 Return X or the rtx for the pseudo reg the value of X was copied into.
2684 OTHER must be valid as a SET_DEST. */
2685
2686rtx
502b8322 2687make_safe_from (rtx x, rtx other)
23b2ce53
RS
2688{
2689 while (1)
2690 switch (GET_CODE (other))
2691 {
2692 case SUBREG:
2693 other = SUBREG_REG (other);
2694 break;
2695 case STRICT_LOW_PART:
2696 case SIGN_EXTEND:
2697 case ZERO_EXTEND:
2698 other = XEXP (other, 0);
2699 break;
2700 default:
2701 goto done;
2702 }
2703 done:
3c0cb5de 2704 if ((MEM_P (other)
23b2ce53 2705 && ! CONSTANT_P (x)
f8cfc6aa 2706 && !REG_P (x)
23b2ce53 2707 && GET_CODE (x) != SUBREG)
f8cfc6aa 2708 || (REG_P (other)
23b2ce53
RS
2709 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2710 || reg_mentioned_p (other, x))))
2711 {
2712 rtx temp = gen_reg_rtx (GET_MODE (x));
2713 emit_move_insn (temp, x);
2714 return temp;
2715 }
2716 return x;
2717}
2718\f
2719/* Emission of insns (adding them to the doubly-linked list). */
2720
2721/* Return the first insn of the current sequence or current function. */
2722
2723rtx
502b8322 2724get_insns (void)
23b2ce53
RS
2725{
2726 return first_insn;
2727}
2728
3dec4024
JH
2729/* Specify a new insn as the first in the chain. */
2730
2731void
502b8322 2732set_first_insn (rtx insn)
3dec4024 2733{
5b0264cb 2734 gcc_assert (!PREV_INSN (insn));
3dec4024
JH
2735 first_insn = insn;
2736}
2737
23b2ce53
RS
2738/* Return the last insn emitted in current sequence or current function. */
2739
2740rtx
502b8322 2741get_last_insn (void)
23b2ce53
RS
2742{
2743 return last_insn;
2744}
2745
2746/* Specify a new insn as the last in the chain. */
2747
2748void
502b8322 2749set_last_insn (rtx insn)
23b2ce53 2750{
5b0264cb 2751 gcc_assert (!NEXT_INSN (insn));
23b2ce53
RS
2752 last_insn = insn;
2753}
2754
2755/* Return the last insn emitted, even if it is in a sequence now pushed. */
2756
2757rtx
502b8322 2758get_last_insn_anywhere (void)
23b2ce53
RS
2759{
2760 struct sequence_stack *stack;
2761 if (last_insn)
2762 return last_insn;
49ad7cfa 2763 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2764 if (stack->last != 0)
2765 return stack->last;
2766 return 0;
2767}
2768
2a496e8b
JDA
2769/* Return the first nonnote insn emitted in current sequence or current
2770 function. This routine looks inside SEQUENCEs. */
2771
2772rtx
502b8322 2773get_first_nonnote_insn (void)
2a496e8b 2774{
91373fe8
JDA
2775 rtx insn = first_insn;
2776
2777 if (insn)
2778 {
2779 if (NOTE_P (insn))
2780 for (insn = next_insn (insn);
2781 insn && NOTE_P (insn);
2782 insn = next_insn (insn))
2783 continue;
2784 else
2785 {
2ca202e7 2786 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2787 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2788 insn = XVECEXP (PATTERN (insn), 0, 0);
2789 }
2790 }
2a496e8b
JDA
2791
2792 return insn;
2793}
2794
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
	/* Skip the trailing run of notes; previous_insn already
	   descends into SEQUENCEs.  */
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  /* If the last insn heads a delay-slot SEQUENCE, return the
	     sequence's final element.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}
/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}
2829\f
2830/* Return the next insn. If it is a SEQUENCE, return the first insn
2831 of the sequence. */
2832
2833rtx
502b8322 2834next_insn (rtx insn)
23b2ce53 2835{
75547801
KG
2836 if (insn)
2837 {
2838 insn = NEXT_INSN (insn);
2839 if (insn && NONJUMP_INSN_P (insn)
2840 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2841 insn = XVECEXP (PATTERN (insn), 0, 0);
2842 }
23b2ce53 2843
75547801 2844 return insn;
23b2ce53
RS
2845}
2846
2847/* Return the previous insn. If it is a SEQUENCE, return the last insn
2848 of the sequence. */
2849
2850rtx
502b8322 2851previous_insn (rtx insn)
23b2ce53 2852{
75547801
KG
2853 if (insn)
2854 {
2855 insn = PREV_INSN (insn);
2856 if (insn && NONJUMP_INSN_P (insn)
2857 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2858 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2859 }
23b2ce53 2860
75547801 2861 return insn;
23b2ce53
RS
2862}
2863
2864/* Return the next insn after INSN that is not a NOTE. This routine does not
2865 look inside SEQUENCEs. */
2866
2867rtx
502b8322 2868next_nonnote_insn (rtx insn)
23b2ce53 2869{
75547801
KG
2870 while (insn)
2871 {
2872 insn = NEXT_INSN (insn);
2873 if (insn == 0 || !NOTE_P (insn))
2874 break;
2875 }
23b2ce53 2876
75547801 2877 return insn;
23b2ce53
RS
2878}
2879
2880/* Return the previous insn before INSN that is not a NOTE. This routine does
2881 not look inside SEQUENCEs. */
2882
2883rtx
502b8322 2884prev_nonnote_insn (rtx insn)
23b2ce53 2885{
75547801
KG
2886 while (insn)
2887 {
2888 insn = PREV_INSN (insn);
2889 if (insn == 0 || !NOTE_P (insn))
2890 break;
2891 }
23b2ce53 2892
75547801 2893 return insn;
23b2ce53
RS
2894}
2895
2896/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2897 or 0, if there is none. This routine does not look inside
0f41302f 2898 SEQUENCEs. */
23b2ce53
RS
2899
2900rtx
502b8322 2901next_real_insn (rtx insn)
23b2ce53 2902{
75547801
KG
2903 while (insn)
2904 {
2905 insn = NEXT_INSN (insn);
2906 if (insn == 0 || INSN_P (insn))
2907 break;
2908 }
23b2ce53 2909
75547801 2910 return insn;
23b2ce53
RS
2911}
2912
2913/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2914 or 0, if there is none. This routine does not look inside
2915 SEQUENCEs. */
2916
2917rtx
502b8322 2918prev_real_insn (rtx insn)
23b2ce53 2919{
75547801
KG
2920 while (insn)
2921 {
2922 insn = PREV_INSN (insn);
2923 if (insn == 0 || INSN_P (insn))
2924 break;
2925 }
23b2ce53 2926
75547801 2927 return insn;
23b2ce53
RS
2928}
2929
ee960939
OH
2930/* Return the last CALL_INSN in the current list, or 0 if there is none.
2931 This routine does not look inside SEQUENCEs. */
2932
2933rtx
502b8322 2934last_call_insn (void)
ee960939
OH
2935{
2936 rtx insn;
2937
2938 for (insn = get_last_insn ();
4b4bf941 2939 insn && !CALL_P (insn);
ee960939
OH
2940 insn = PREV_INSN (insn))
2941 ;
2942
2943 return insn;
2944}
2945
23b2ce53
RS
2946/* Find the next insn after INSN that really does something. This routine
2947 does not look inside SEQUENCEs. Until reload has completed, this is the
2948 same as next_real_insn. */
2949
69732dcb 2950int
4f588890 2951active_insn_p (const_rtx insn)
69732dcb 2952{
4b4bf941
JQ
2953 return (CALL_P (insn) || JUMP_P (insn)
2954 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
2955 && (! reload_completed
2956 || (GET_CODE (PATTERN (insn)) != USE
2957 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
2958}
2959
23b2ce53 2960rtx
502b8322 2961next_active_insn (rtx insn)
23b2ce53 2962{
75547801
KG
2963 while (insn)
2964 {
2965 insn = NEXT_INSN (insn);
2966 if (insn == 0 || active_insn_p (insn))
2967 break;
2968 }
23b2ce53 2969
75547801 2970 return insn;
23b2ce53
RS
2971}
2972
2973/* Find the last insn before INSN that really does something. This routine
2974 does not look inside SEQUENCEs. Until reload has completed, this is the
2975 same as prev_real_insn. */
2976
2977rtx
502b8322 2978prev_active_insn (rtx insn)
23b2ce53 2979{
75547801
KG
2980 while (insn)
2981 {
2982 insn = PREV_INSN (insn);
2983 if (insn == 0 || active_insn_p (insn))
2984 break;
2985 }
23b2ce53 2986
75547801 2987 return insn;
23b2ce53
RS
2988}
2989
2990/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2991
2992rtx
502b8322 2993next_label (rtx insn)
23b2ce53 2994{
75547801
KG
2995 while (insn)
2996 {
2997 insn = NEXT_INSN (insn);
2998 if (insn == 0 || LABEL_P (insn))
2999 break;
3000 }
23b2ce53 3001
75547801 3002 return insn;
23b2ce53
RS
3003}
3004
3005/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3006
3007rtx
502b8322 3008prev_label (rtx insn)
23b2ce53 3009{
75547801
KG
3010 while (insn)
3011 {
3012 insn = PREV_INSN (insn);
3013 if (insn == 0 || LABEL_P (insn))
3014 break;
3015 }
23b2ce53 3016
75547801 3017 return insn;
23b2ce53 3018}
6c2511d3
RS
3019
3020/* Return the last label to mark the same position as LABEL. Return null
3021 if LABEL itself is null. */
3022
3023rtx
3024skip_consecutive_labels (rtx label)
3025{
3026 rtx insn;
3027
3028 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3029 if (LABEL_P (insn))
3030 label = insn;
3031
3032 return label;
3033}
23b2ce53
RS

#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  /* If the user heads a delay-slot SEQUENCE, the real user is its
     first element.  */
  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  /* Cross-link setter and user through register notes so each can be
     recovered from the other.  */
  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  /* Look inside a delay-slot SEQUENCE: the user would be its head.  */
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Only return the candidate if it actually references cc0.  */
  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}
/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  /* The previous nonnote insn must in fact set cc0, or the chain is
     corrupt.  */
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.
   for_each_rtx callback: DATA is the register being searched for.
   Returns 1 on a match (stop), 0 to keep scanning, and -1 to skip
   the subexpressions of an autoinc that modifies some other register.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
	return 1;
      break;

    default:
      /* Every RTX_AUTOINC code is listed above.  */
      gcc_unreachable ();
    }
  return -1;
}
#endif
e5bef2e4
HB
3127/* Increment the label uses for all labels present in rtx. */
3128
3129static void
502b8322 3130mark_label_nuses (rtx x)
e5bef2e4 3131{
b3694847
SS
3132 enum rtx_code code;
3133 int i, j;
3134 const char *fmt;
e5bef2e4
HB
3135
3136 code = GET_CODE (x);
7537fc90 3137 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3138 LABEL_NUSES (XEXP (x, 0))++;
3139
3140 fmt = GET_RTX_FORMAT (code);
3141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3142 {
3143 if (fmt[i] == 'e')
0fb7aeda 3144 mark_label_nuses (XEXP (x, i));
e5bef2e4 3145 else if (fmt[i] == 'E')
0fb7aeda 3146 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3147 mark_label_nuses (XVECEXP (x, i, j));
3148 }
3149}
3150

/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* Export the branch probability of a conditional jump to the
     splitters via the split_branch_probability global.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    /* Append TRIAL's usage list at the end of any list already
	       present on the new call.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  /* EH region notes go on every insn in the split that can
	     throw.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn)
		  || (flag_non_call_exceptions && INSN_P (insn)
		      && may_trap_p (PATTERN (insn))))
		add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  /* Re-attach REG_INC to whichever new insn actually contains
	     the auto-increment of this register.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can be occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}

/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* Not yet recognized.  */
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Catch a jump pattern emitted through the plain-insn entry point.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
3362
2f937369 3363/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3364
38109dab 3365rtx
502b8322 3366make_jump_insn_raw (rtx pattern)
23b2ce53 3367{
b3694847 3368 rtx insn;
23b2ce53 3369
4b1f5e8c 3370 insn = rtx_alloc (JUMP_INSN);
1632afca 3371 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3372
3373 PATTERN (insn) = pattern;
3374 INSN_CODE (insn) = -1;
1632afca
RS
3375 REG_NOTES (insn) = NULL;
3376 JUMP_LABEL (insn) = NULL;
55e092c4 3377 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3378 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3379
3380 return insn;
3381}
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* Not yet recognized.  */
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
23b2ce53
RS
3402\f
3403/* Add INSN to the end of the doubly-linked list.
3404 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3405
3406void
502b8322 3407add_insn (rtx insn)
23b2ce53
RS
3408{
3409 PREV_INSN (insn) = last_insn;
3410 NEXT_INSN (insn) = 0;
3411
3412 if (NULL != last_insn)
3413 NEXT_INSN (last_insn) = insn;
3414
3415 if (NULL == first_insn)
3416 first_insn = insn;
3417
3418 last_insn = insn;
3419}
3420
a0ae8e8d
RK
3421/* Add INSN into the doubly-linked list after insn AFTER. This and
3422 the next should be the only functions called to insert an insn once
ba213285 3423 delay slots have been filled since only they know how to update a
a0ae8e8d 3424 SEQUENCE. */
23b2ce53
RS
3425
3426void
6fb5fa3c 3427add_insn_after (rtx insn, rtx after, basic_block bb)
23b2ce53
RS
3428{
3429 rtx next = NEXT_INSN (after);
3430
5b0264cb 3431 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3432
23b2ce53
RS
3433 NEXT_INSN (insn) = next;
3434 PREV_INSN (insn) = after;
3435
3436 if (next)
3437 {
3438 PREV_INSN (next) = insn;
4b4bf941 3439 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3440 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3441 }
3442 else if (last_insn == after)
3443 last_insn = insn;
3444 else
3445 {
49ad7cfa 3446 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3447 /* Scan all pending sequences too. */
3448 for (; stack; stack = stack->next)
3449 if (after == stack->last)
fef0509b
RK
3450 {
3451 stack->last = insn;
3452 break;
3453 }
a0ae8e8d 3454
5b0264cb 3455 gcc_assert (stack);
23b2ce53
RS
3456 }
3457
4b4bf941
JQ
3458 if (!BARRIER_P (after)
3459 && !BARRIER_P (insn)
3c030e88
JH
3460 && (bb = BLOCK_FOR_INSN (after)))
3461 {
3462 set_block_for_insn (insn, bb);
38c1593d 3463 if (INSN_P (insn))
6fb5fa3c 3464 df_insn_rescan (insn);
3c030e88 3465 /* Should not happen as first in the BB is always
a1f300c0 3466 either NOTE or LABEL. */
a813c111 3467 if (BB_END (bb) == after
3c030e88 3468 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 3469 && !BARRIER_P (insn)
a38e7aa5 3470 && !NOTE_INSN_BASIC_BLOCK_P (insn))
a813c111 3471 BB_END (bb) = insn;
3c030e88
JH
3472 }
3473
23b2ce53 3474 NEXT_INSN (after) = insn;
4b4bf941 3475 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3476 {
3477 rtx sequence = PATTERN (after);
3478 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3479 }
3480}
3481
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from before.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      /* If PREV heads a delay-slot SEQUENCE, the forward pointer of the
	 sequence's final element must be updated too.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Infer the block from BEFORE when the caller did not supply one.  */
  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
3545
6fb5fa3c
DB
3546
3547/* Replace insn with an deleted instruction note. */
3548
3549void set_insn_deleted (rtx insn)
3550{
3551 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3552 PUT_CODE (insn, NOTE);
3553 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3554}
3555
3556
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  /* Unlink on the previous side, fixing up any enclosing SEQUENCE or
     pending sequence-stack head.  */
  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Symmetrically unlink on the next side.  */
  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      gcc_assert (stack);
    }
  /* Fix up basic-block boundaries if INSN was inside a block.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
	df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
3631
ee960939
OH
3632/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3633
3634void
502b8322 3635add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3636{
5b0264cb 3637 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3638
3639 /* Put the register usage information on the CALL. If there is already
3640 some usage information, put ours at the end. */
3641 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3642 {
3643 rtx link;
3644
3645 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3646 link = XEXP (link, 1))
3647 ;
3648
3649 XEXP (link, 1) = call_fusage;
3650 }
3651 else
3652 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3653}
3654
23b2ce53
RS
3655/* Delete all insns made since FROM.
3656 FROM becomes the new last instruction. */
3657
3658void
502b8322 3659delete_insns_since (rtx from)
23b2ce53
RS
3660{
3661 if (from == 0)
3662 first_insn = 0;
3663 else
3664 NEXT_INSN (from) = 0;
3665 last_insn = from;
3666}
3667
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
3701
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      /* If the insns came from a different block, fix up that block's
	 end pointer and mark it dirty too.  */
      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Re-home every moved insn into the destination block.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
3733

/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a chain of insns: splice each one in ahead of
	 BEFORE, preserving order.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern: wrap it in a fresh INSN first.  */
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a chain of insns: splice each one in ahead of
	 BEFORE, preserving order.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, NULL);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern: wrap it in a fresh JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}
/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a chain of insns: splice each one in ahead of
	 BEFORE, preserving order.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, NULL);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern: wrap it in a fresh CALL_INSN.  */
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}
3891
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.
   Returns the new barrier.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  /* Every emitted insn gets a fresh unique id.  */
  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}
3905
/* Emit the label LABEL before the insn BEFORE.
   Returns LABEL.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless: a
     label already linked in (UID != 0) is left alone.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}
3921
/* Emit a note of subtype SUBTYPE before the insn BEFORE.
   Returns the new note.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  /* Start with an all-zero payload; callers fill NOTE_DATA as needed.  */
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
3936\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.

   Splices the chain starting at FIRST into the insn stream directly
   after AFTER.  If BB is NULL and AFTER is not a barrier, the block is
   inferred from AFTER; when a block is known, every non-barrier insn in
   the chain is assigned to it and rescanned for dataflow.
   Returns the last insn of the spliced chain.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      /* The loop body handles every insn except the final one (the
	 loop stops while NEXT_INSN (last) is still non-null) ...  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      /* ... so the last insn needs the same treatment afterwards.  */
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      /* Inserting after the block's old end moves the end marker.  */
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block bookkeeping needed; just find the chain's last insn.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  /* Splice [FIRST, LAST] between AFTER and its old successor.  */
  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  /* Keep the global end-of-stream pointer up to date.  */
  if (after == last_insn)
    last_insn = last;
  return last;
}
3981
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.
   X may be NULL (in which case nothing is emitted and AFTER is
   returned), an existing insn chain, or a bare pattern.
   Returns the last insn emitted.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is an insn chain; splice the whole chain in one pass.  */
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      /* SEQUENCE rtxes must never reach the emit functions.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new INSN.  */
      last = make_insn_raw (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
4020
255680cf 4021
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is an insn chain; splice the whole chain in one pass.  */
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}
4057
/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is an insn chain; splice the whole chain in one pass.  */
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new CALL_INSN.  */
      last = make_call_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}
4093
/* Make an insn of code BARRIER
   and output it after the insn AFTER.
   Returns the new barrier.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}
4107
/* Emit the label LABEL after the insn AFTER.
   Returns LABEL.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless: only link in a label whose UID is still 0.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}
4124
/* Emit a note of subtype SUBTYPE after the insn AFTER.
   Returns the new note.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  /* Start with an all-zero payload; callers fill NOTE_DATA as needed.  */
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
23b2ce53 4138\f
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.
   LOC == 0 means "no location": the insns are emitted but no locator
   is assigned.  Only active insns that do not already carry a locator
   are updated.  Returns the last insn emitted.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  /* Walk the freshly emitted range (NEXT_INSN (after) .. last).  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4159
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.
   Non-insn AFTER (e.g. a label or barrier) carries no locator, so fall
   back to the plain noloc variant in that case.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_insn_after_noloc (pattern, after, NULL);
}
4169
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.
   LOC == 0 means "no location".  Only active insns without a locator
   are updated.  Returns the last insn emitted.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  /* Walk the freshly emitted range (NEXT_INSN (after) .. last).  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4190
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.
   Non-insn AFTER carries no locator, so fall back to the noloc variant.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}
4200
/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.
   LOC == 0 means "no location".  Only active insns without a locator
   are updated.  Returns the last insn emitted.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  /* Walk the freshly emitted range (NEXT_INSN (after) .. last).  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4221
/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.
   Non-insn AFTER carries no locator, so fall back to the noloc variant.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_call_insn_after_noloc (pattern, after);
}
4231
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.
   LOC == 0 means "no location".  Only active insns without a locator
   are updated.  Returns the last insn emitted.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  /* Remember the insn preceding the insertion point so the newly
     emitted range can be recovered afterwards.  */
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  /* BEFORE may have been the very first insn, in which case the new
     insns start at the head of the insn list.  */
  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
4256
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.
   Non-insn BEFORE carries no locator, so fall back to the noloc variant.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}
4266
4267/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4268rtx
4269emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4270{
4271 rtx first = PREV_INSN (before);
4272 rtx last = emit_jump_insn_before_noloc (pattern, before);
4273
4274 if (pattern == NULL_RTX)
4275 return last;
4276
4277 first = NEXT_INSN (first);
4278 while (1)
4279 {
4280 if (active_insn_p (first) && !INSN_LOCATOR (first))
4281 INSN_LOCATOR (first) = loc;
4282 if (first == last)
4283 break;
4284 first = NEXT_INSN (first);
4285 }
4286 return last;
4287}
4288
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.
   Non-insn BEFORE carries no locator, so fall back to the noloc variant.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}
4298
4299/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4300rtx
4301emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4302{
4303 rtx first = PREV_INSN (before);
4304 rtx last = emit_call_insn_before_noloc (pattern, before);
0d682900 4305
dd3adcf8
DJ
4306 if (pattern == NULL_RTX)
4307 return last;
4308
2f937369
DM
4309 first = NEXT_INSN (first);
4310 while (1)
4311 {
a7102479 4312 if (active_insn_p (first) && !INSN_LOCATOR (first))
0435312e 4313 INSN_LOCATOR (first) = loc;
2f937369
DM
4314 if (first == last)
4315 break;
4316 first = NEXT_INSN (first);
4317 }
0d682900
JH
4318 return last;
4319}
/* Like emit_call_insn_before_noloc,
   but set insn_locator according to BEFORE.
   Non-insn BEFORE carries no locator, so fall back to the noloc variant.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_call_insn_before_noloc (pattern, before);
}
0d682900 4331\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   X may be NULL (nothing is emitted), an existing insn chain (each
   insn is appended in order), or a bare pattern (wrapped in a new
   INSN).

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is an insn chain; append each insn in order.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      /* SEQUENCE rtxes must never reach the emit functions.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new INSN.  */
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
4378
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.
   If X is already an insn chain, each insn is appended unchanged.
   Returns the last insn emitted.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is an insn chain; append each insn in order.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
4419
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.
   If X is already an insn chain, it is delegated to emit_insn.
   Returns the insn emitted (the last one for chains).  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn (chain); emit_insn handles the splicing.  */
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a new CALL_INSN.  */
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
4453
/* Add the label LABEL to the end of the doubly-linked list.
   Returns LABEL.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless: only link in a label whose UID is still 0.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}
4469
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.
   Returns the new barrier.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
4481
/* Emit a copy of note ORIG at the end of the insn stream.
   The payload (NOTE_DATA) and kind are copied from ORIG; the copy
   gets a fresh UID and no basic block.  Returns the new note.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}
4499
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.
   The note's payload starts zeroed.  Returns the new note.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
4516
/* Emit a clobber of lvalue X.
   A CONCAT is split and each half clobbered separately, since CONCATs
   must not appear in the insn stream.  Returns the last insn emitted.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}
4530
/* Return a sequence of insns to clobber lvalue X.
   The insns are built in a private sequence and returned without being
   emitted into the current stream.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
4544
/* Emit a use of rvalue X.
   A CONCAT is split and each half used separately, since CONCATs
   must not appear in the insn stream.  Returns the last insn emitted.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}
4558
/* Return a sequence of insns to use rvalue X.
   The insns are built in a private sequence and returned without being
   emitted into the current stream.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
4572
/* Cause next statement to emit a line note even if the line number
   has not changed.  Works by invalidating the cached last location.  */

void
force_next_line_note (void)
{
  last_location = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.

   Returns the (new or updated) note, NULL_RTX when the note is
   refused, or REG_NOTES (insn) after appending a fresh note.  For
   REG_EQUAL/REG_EQUIV the dataflow notes are rescanned.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  gcc_assert (!note);
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Update an existing note in place and tell df about it.  */
      if (note)
	{
	  XEXP (note, 0) = datum;
	  df_notes_rescan (insn);
	  return note;
	}
      break;

    default:
      /* Update an existing note in place; no df bookkeeping needed.  */
      if (note)
	{
	  XEXP (note, 0) = datum;
	  return note;
	}
      break;
    }

  /* No existing note of this kind: append a new one.  */
  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
23b2ce53
RS
4640\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      /* A set of the program counter is a jump; a set from a CALL is
	 a call; anything else is an ordinary insn.  */
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      /* Scan the parallel's elements for a CALL or a set of pc.  */
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
4677
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.
   An unconditional jump or RETURN is followed by a barrier.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx insn = emit_jump_insn (x);
	/* Control cannot fall through an unconditional jump, so
	   terminate the block with a barrier.  */
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    default:
      gcc_unreachable ();
    }
}
4705\f
e2500fed 4706/* Space for free sequence stack entries. */
1431042e 4707static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 4708
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.

   Saves the current insn chain on the sequence stack and starts a
   fresh, empty chain; stack entries are recycled via
   free_sequence_stack.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  /* Reuse a previously freed stack entry when one is available.  */
  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = GGC_NEW (struct sequence_stack);

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}
4738
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  /* Walk to the end of the chain so last_insn can be set.  */
  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}
4755
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  first_insn = first;
  last_insn = last;
}
4767
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  /* The bottom of the sequence stack holds the outermost chain.  */
  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
}
4784
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  /* The bottom of the sequence stack holds the outermost chain.  */
  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  /* Write the possibly grown chain back before popping.  */
  top->first = first_insn;
  top->last = last_insn;

  end_sequence ();
}
4801
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_stack = tem->next;

  /* Recycle the stack entry onto the free list for start_sequence.  */
  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
4828
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
23b2ce53 4836\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}
4848
da43a810
BS
4849\f
4850/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4851static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4852static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4853static int copy_insn_n_scratches;
4854
4855/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4856 copied an ASM_OPERANDS.
4857 In that case, it is the original input-operand vector. */
4858static rtvec orig_asm_operands_vector;
4859
4860/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4861 copied an ASM_OPERANDS.
4862 In that case, it is the copied input-operand vector. */
4863static rtvec copy_asm_operands_vector;
4864
4865/* Likewise for the constraints vector. */
4866static rtvec orig_asm_constraints_vector;
4867static rtvec copy_asm_constraints_vector;
4868
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.

   Relies on the file-scope state copy_insn_n_scratches,
   copy_insn_scratch_in/out and the *_asm_*_vector variables, which
   copy_insn resets before each top-level call.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    /* Shareable rtxes are returned without copying.  */
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      /* A clobber of a hard register is shareable.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      /* Each SCRATCH is copied at most once per copy_insn call; later
	 references reuse the recorded copy.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Recurse into operands according to the rtx format string.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	/* ASM_OPERANDS operand/constraint vectors are shared between
	   the outputs of one asm; reuse the copies made earlier.  */
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      /* Record the mapping so further references share this copy.  */
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      /* Remember the operand/constraint vectors for sharing above.  */
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
4997
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.

   Resets the per-call sharing state used by copy_insn_1.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
59ec66dc 5013
23b2ce53
RS
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  /* Start each function with an empty insn chain, fresh insn UIDs, and
     no remembered source location or pending sequences.  */
  first_insn = NULL;
  last_insn = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  /* NOTE(review): the "+ 101" gives the tables initial slack beyond the
     fixed/virtual registers, presumably so early pseudos fit before the
     first reallocation — confirm against the table-growing code.  */
  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

  /* Record the known alignment of the stack-related registers, when the
     target defines a stack boundary.  */
#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

  /* Allow the target to hook per-function expander initialization.  */
#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5076
a73b091d 5077/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5078
5079static rtx
a73b091d 5080gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5081{
5082 rtx tem;
5083 rtvec v;
5084 int units, i;
5085 enum machine_mode inner;
5086
5087 units = GET_MODE_NUNITS (mode);
5088 inner = GET_MODE_INNER (mode);
5089
15ed7b52
JG
5090 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5091
69ef87e2
AH
5092 v = rtvec_alloc (units);
5093
a73b091d
JW
5094 /* We need to call this function after we set the scalar const_tiny_rtx
5095 entries. */
5096 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5097
5098 for (i = 0; i < units; ++i)
a73b091d 5099 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5100
a06e3c40 5101 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5102 return tem;
5103}
5104
a06e3c40 5105/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
a73b091d 5106 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5107rtx
502b8322 5108gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5109{
a73b091d
JW
5110 enum machine_mode inner = GET_MODE_INNER (mode);
5111 int nunits = GET_MODE_NUNITS (mode);
5112 rtx x;
a06e3c40
R
5113 int i;
5114
a73b091d
JW
5115 /* Check to see if all of the elements have the same value. */
5116 x = RTVEC_ELT (v, nunits - 1);
5117 for (i = nunits - 2; i >= 0; i--)
5118 if (RTVEC_ELT (v, i) != x)
5119 break;
5120
5121 /* If the values are all the same, check to see if we can use one of the
5122 standard constant vectors. */
5123 if (i == -1)
5124 {
5125 if (x == CONST0_RTX (inner))
5126 return CONST0_RTX (mode);
5127 else if (x == CONST1_RTX (inner))
5128 return CONST1_RTX (mode);
5129 }
5130
5131 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5132}
5133
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* Static chain: first try the register-number macros...  */
#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

  /* ...then let explicit STATIC_CHAIN rtx definitions, if the target
     provides them, override the values computed above.  */
#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
5201
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  /* Scan the integer modes from narrowest to widest, remembering the
     first one matching BITS_PER_UNIT and the first matching
     BITS_PER_WORD.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  /* Likewise, find the float mode matching DOUBLE_TYPE_SIZE.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* Reuse a cached CONST_INT for const_true_rtx when STORE_FLAG_VALUE
     falls inside the cached range.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* Set up the shared REAL_VALUE_TYPE constants 0, 1, 2, -1 and 0.5.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  /* 0.5 is 1 with its exponent lowered by one.  */
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  /* Fill const_tiny_rtx with the shared constants 0, 1 and 2 for every
     scalar float, decimal float and integer mode.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* A zero complex constant is a CONCAT of two zero scalar parts.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* Vector constants 0 and 1 for integer and float vector modes.  This
     must run after the scalar const_tiny_rtx entries are set, since
     gen_const_vector reads them.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Fixed-point fractional modes only have a zero constant; the value 1
     is not representable in a pure fractional type.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  /* Accumulator modes get both 0 and 1; the 1 is built by shifting a
     one into position above the fractional bits.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  /* Vector fixed-point modes mirror the scalar cases above.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Condition-code modes share const0_rtx as their zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
a11759a3 5460\f
969d70ca
JH
5461/* Produce exact duplicate of insn INSN after AFTER.
5462 Care updating of libcall regions if present. */
5463
5464rtx
502b8322 5465emit_copy_of_insn_after (rtx insn, rtx after)
969d70ca 5466{
60564289 5467 rtx new_rtx, link;
969d70ca
JH
5468
5469 switch (GET_CODE (insn))
5470 {
5471 case INSN:
60564289 5472 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca
JH
5473 break;
5474
5475 case JUMP_INSN:
60564289 5476 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca
JH
5477 break;
5478
5479 case CALL_INSN:
60564289 5480 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
969d70ca 5481 if (CALL_INSN_FUNCTION_USAGE (insn))
60564289 5482 CALL_INSN_FUNCTION_USAGE (new_rtx)
969d70ca 5483 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
60564289
KG
5484 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5485 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5486 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5487 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
becfd6e5 5488 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
969d70ca
JH
5489 break;
5490
5491 default:
5b0264cb 5492 gcc_unreachable ();
969d70ca
JH
5493 }
5494
5495 /* Update LABEL_NUSES. */
60564289 5496 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
969d70ca 5497
60564289 5498 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
ba4f7968 5499
0a3d71f5
JW
5500 /* If the old insn is frame related, then so is the new one. This is
5501 primarily needed for IA-64 unwind info which marks epilogue insns,
5502 which may be duplicated by the basic block reordering code. */
60564289 5503 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
0a3d71f5 5504
cf7c4aa6
HPN
5505 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5506 will make them. REG_LABEL_TARGETs are created there too, but are
5507 supposed to be sticky, so we copy them. */
969d70ca 5508 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cf7c4aa6 5509 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
969d70ca
JH
5510 {
5511 if (GET_CODE (link) == EXPR_LIST)
60564289 5512 add_reg_note (new_rtx, REG_NOTE_KIND (link),
65c5f2a6 5513 copy_insn_1 (XEXP (link, 0)));
969d70ca 5514 else
60564289 5515 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
969d70ca
JH
5516 }
5517
60564289
KG
5518 INSN_CODE (new_rtx) = INSN_CODE (insn);
5519 return new_rtx;
969d70ca 5520}
e2500fed 5521
1431042e 5522static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d
JH
5523rtx
5524gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5525{
5526 if (hard_reg_clobbers[mode][regno])
5527 return hard_reg_clobbers[mode][regno];
5528 else
5529 return (hard_reg_clobbers[mode][regno] =
5530 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5531}
5532
e2500fed 5533#include "gt-emit-rtl.h"