/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines),
   these are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

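/* Note: the if_marked option above makes these tables "weak" for the
   garbage collector: during a collection an entry is kept only while
   ggc_marked_p reports it reachable from some other live object, so
   constants and attribute structures that become unreferenced are
   reclaimed automatically rather than pinned by the table itself.  */
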
#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
				 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG rtx that holds DECL at OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

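/* As with mem_attrs, the hash table guarantees that a given
   (DECL, OFFSET) pair maps to a single shared reg_attrs object,
   so attribute structures can be compared by pointer.  */
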
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
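
/* Because CONST_INTs are shared this way, equal values are always the
   same object, and pointer comparison suffices.  For example,
   GEN_INT (0) always returns const0_rtx, the pre-allocated entry
   const_int_rtx[MAX_SAVED_CONST_INT].  */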

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

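/* Unlike plain GEN_INT, gen_int_mode first truncates and sign-extends C
   to MODE, so the result is canonical.  An illustrative case:
   gen_int_mode (0xff, QImode) yields (const_int -1), whereas
   GEN_INT (0xff) would yield a CONST_INT that is not in canonical
   sign-extended form for QImode.  */
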
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
	of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 are the same),
	then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

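/* Illustrative cases, assuming a host with 32-bit HOST_WIDE_INT:
   immed_double_const (5, 0, DImode) returns (const_int 5) by case 2
   above, while immed_double_const (0, 1, DImode), i.e. 2^32, needs
   both words and so returns a CONST_DOUBLE by case 3.  */
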
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

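/* For example, outside of reload, gen_rtx_REG (Pmode,
   STACK_POINTER_REGNUM) always returns the unique stack_pointer_rtx
   object, so explicit stack pointer references can be recognized by
   pointer comparison.  */
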
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

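/* Illustrative cases, assuming a 32-bit little-endian target with
   UNITS_PER_WORD == 4 and a pseudo register REG of mode SImode:
   validate_subreg (QImode, SImode, REG, 0) is true (the lowpart),
   while validate_subreg (QImode, SImode, REG, 1) is false, since for
   pseudos a narrow subreg must select the lowpart of a word (on a
   big-endian target the valid offset would instead be 3).  */
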
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f
/* gen_rtvec (n, [rt1, ..., rtn])
**
** This routine creates an rtvec and stores within it the
** pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
			  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

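/* For example, while generating_concat_p is set, gen_reg_rtx (DCmode)
   returns (concat:DC (reg:DF) (reg:DF)) built from two fresh DFmode
   pseudos rather than a single DCmode pseudo.  */
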
/* Generate a register with the same attributes as REG, but offset by
   OFFSET.  Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of the variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we did not do the big endian correction, the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
						        of char[4]
     DI        HI    4          6           2           1st element in HImode
						        of int16[2]

     If the size of DECL is equal or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
	  && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
	offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
	offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
	offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		      % UNITS_PER_WORD);
      else
	offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
	{
	  /* MODE is wider than the variable so the new reg will cover
	     the whole variable, so the resulting OFFSET should be 0.  */
	  offset = 0;
	}
      else
	{
	  /* Convert little endian to machine endian WRT size of variable.  */
	  if (WORDS_BIG_ENDIAN)
	    offset = ((var_size - 1 - offset_le)
		      / UNITS_PER_WORD) * UNITS_PER_WORD;
	  else
	    offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

	  if (BYTES_BIG_ENDIAN)
	    offset += ((var_size - 1 - offset_le)
		       % UNITS_PER_WORD);
	  else
	    offset += offset_le % UNITS_PER_WORD;
	}
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes for REG from the memory attributes
   of MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For a register, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For a register, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
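
/* Example (illustrative): gen_lowpart_common (QImode, GEN_INT (0x1234))
   yields (const_int 52), i.e. 0x34 -- the low-order byte is selected
   via simplify_gen_subreg; no arithmetic conversion takes place.  */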
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP explicitly, in case
   EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

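/* For example, for the SImode lowpart of a DImode value on a 32-bit
   target, this returns 0 if !WORDS_BIG_ENDIAN and 4 if WORDS_BIG_ENDIAN,
   since a big-endian target stores the low-order word last.  */
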
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

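/* The mirror image of subreg_lowpart_offset: for the SImode highpart
   of a DImode value on a 32-bit target, this returns 4 if
   !WORDS_BIG_ENDIAN and 0 if WORDS_BIG_ENDIAN.  */
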
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

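/* For example, on a 32-bit target, operand_subword (op, 1, 0, DImode)
   forms the word at byte offset 4 from the low-order address of OP,
   either as an adjusted MEM or via simplify_gen_subreg.  */
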
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR)
	inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
		   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
			   TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			     TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

173b24b9
RK
1423/* Given REF, a MEM, and T, either the type of X or the expression
1424 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1425 if we are making a new object of this type. BITPOS is nonzero if
1426 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1427
1428void
502b8322
AJ
1429set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1430 HOST_WIDE_INT bitpos)
173b24b9 1431{
8ac61af7 1432 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
998d7deb 1433 tree expr = MEM_EXPR (ref);
8ac61af7
RK
1434 rtx offset = MEM_OFFSET (ref);
1435 rtx size = MEM_SIZE (ref);
1436 unsigned int align = MEM_ALIGN (ref);
6f1087be 1437 HOST_WIDE_INT apply_bitpos = 0;
173b24b9
RK
1438 tree type;
1439
1440 /* It can happen that type_for_mode was given a mode for which there
1441 is no language-level type. In which case it returns NULL, which
1442 we can see here. */
1443 if (t == NULL_TREE)
1444 return;
1445
1446 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1447 if (type == error_mark_node)
1448 return;
173b24b9 1449
173b24b9
RK
1450 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1451 wrong answer, as it assumes that DECL_RTL already has the right alias
1452 info. Callers should not set DECL_RTL until after the call to
1453 set_mem_attributes. */
5b0264cb 1454 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1455
738cc472 1456 /* Get the alias set from the expression or type (perhaps using a
8ac61af7
RK
1457 front-end routine) and use it. */
1458 alias = get_alias_set (t);
173b24b9 1459
a5e9c810 1460 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
173b24b9 1461 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
f8ad8d7c 1462 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1463
8ac61af7
RK
1464 /* If we are making an object of this type, or if this is a DECL, we know
1465 that it is a scalar if the type is not an aggregate. */
1466 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
173b24b9
RK
1467 MEM_SCALAR_P (ref) = 1;
1468
c3d32120
RK
1469 /* We can set the alignment from the type if we are making an object,
1470 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
7ccf35ed
DN
1471 if (objectp || TREE_CODE (t) == INDIRECT_REF
1472 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1473 || TYPE_ALIGN_OK (type))
c3d32120 1474 align = MAX (align, TYPE_ALIGN (type));
7ccf35ed
DN
1475 else
1476 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1477 {
1478 if (integer_zerop (TREE_OPERAND (t, 1)))
1479 /* We don't know anything about the alignment. */
1480 align = BITS_PER_UNIT;
1481 else
1482 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1483 }
40c0668b 1484
738cc472
RK
1485 /* If the size is known, we can set that. */
1486 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
8ac61af7 1487 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
738cc472 1488
80965c18
RK
1489 /* If T is not a type, we may be able to deduce some more information about
1490 the expression. */
1491 if (! TYPE_P (t))
8ac61af7 1492 {
8476af98 1493 tree base;
389fdba0 1494
8ac61af7
RK
1495 if (TREE_THIS_VOLATILE (t))
1496 MEM_VOLATILE_P (ref) = 1;
173b24b9 1497
c56e3582
RK
1498 /* Now remove any conversions: they don't change what the underlying
1499 object is. Likewise for SAVE_EXPR. */
8ac61af7 1500 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
c56e3582
RK
1501 || TREE_CODE (t) == NON_LVALUE_EXPR
1502 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1503 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1504 t = TREE_OPERAND (t, 0);
1505
8476af98
RH
1506 /* We may look through structure-like accesses for the purposes of
1507 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1508 base = t;
1509 while (TREE_CODE (base) == COMPONENT_REF
1510 || TREE_CODE (base) == REALPART_EXPR
1511 || TREE_CODE (base) == IMAGPART_EXPR
1512 || TREE_CODE (base) == BIT_FIELD_REF)
1513 base = TREE_OPERAND (base, 0);
1514
1515 if (DECL_P (base))
1516 {
1517 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1518 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1519 else
1520 MEM_NOTRAP_P (ref) = 1;
1521 }
1522 else
1523 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1524
1525 base = get_base_address (base);
1526 if (base && DECL_P (base)
1527 && TREE_READONLY (base)
1528 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1529 {
1530 tree base_type = TREE_TYPE (base);
1531 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1532 || DECL_ARTIFICIAL (base));
1533 MEM_READONLY_P (ref) = 1;
1534 }
1535
2039d7aa
RH
1536 /* If this expression uses it's parent's alias set, mark it such
1537 that we won't change it. */
1538 if (component_uses_parent_alias_set (t))
10b76d73
RK
1539 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1540
8ac61af7
RK
1541 /* If this is a decl, set the attributes of the MEM from it. */
1542 if (DECL_P (t))
1543 {
998d7deb
RH
1544 expr = t;
1545 offset = const0_rtx;
6f1087be 1546 apply_bitpos = bitpos;
8ac61af7
RK
1547 size = (DECL_SIZE_UNIT (t)
1548 && host_integerp (DECL_SIZE_UNIT (t), 1)
1549 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
68252e27 1550 align = DECL_ALIGN (t);
8ac61af7
RK
1551 }
1552
40c0668b 1553 /* If this is a constant, we know the alignment. */
6615c446 1554 else if (CONSTANT_CLASS_P (t))
9ddfb1a7
RK
1555 {
1556 align = TYPE_ALIGN (type);
1557#ifdef CONSTANT_ALIGNMENT
1558 align = CONSTANT_ALIGNMENT (t, align);
1559#endif
1560 }
998d7deb
RH
1561
1562 /* If this is a field reference and not a bit-field, record it. */
1563 /* ??? There is some information that can be gleened from bit-fields,
1564 such as the word offset in the structure that might be modified.
1565 But skip it for now. */
1566 else if (TREE_CODE (t) == COMPONENT_REF
1567 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1568 {
1569 expr = component_ref_for_mem_expr (t);
1570 offset = const0_rtx;
6f1087be 1571 apply_bitpos = bitpos;
998d7deb
RH
1572 /* ??? Any reason the field size would be different than
1573 the size we got from the type? */
1574 }
1575
1576 /* If this is an array reference, look for an outer field reference. */
1577 else if (TREE_CODE (t) == ARRAY_REF)
1578 {
1579 tree off_tree = size_zero_node;
1b1838b6
JW
1580 /* We can't modify t, because we use it at the end of the
1581 function. */
1582 tree t2 = t;
998d7deb
RH
1583
1584 do
1585 {
1b1838b6 1586 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1587 tree low_bound = array_ref_low_bound (t2);
1588 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1589
1590 /* We assume all arrays have sizes that are a multiple of a byte.
1591 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1592 index, then convert to sizetype and multiply by the size of
1593 the array element. */
1594 if (! integer_zerop (low_bound))
4845b383
KH
1595 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1596 index, low_bound);
2567406a 1597
44de5aeb
RK
1598 off_tree = size_binop (PLUS_EXPR,
1599 size_binop (MULT_EXPR, convert (sizetype,
1600 index),
1601 unit_size),
1602 off_tree);
1b1838b6 1603 t2 = TREE_OPERAND (t2, 0);
998d7deb 1604 }
1b1838b6 1605 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1606
1b1838b6 1607 if (DECL_P (t2))
c67a1cf6 1608 {
1b1838b6 1609 expr = t2;
40cb04f1 1610 offset = NULL;
c67a1cf6 1611 if (host_integerp (off_tree, 1))
40cb04f1
RH
1612 {
1613 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1614 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1b1838b6 1615 align = DECL_ALIGN (t2);
fc555370 1616 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
40cb04f1
RH
1617 align = aoff;
1618 offset = GEN_INT (ioff);
6f1087be 1619 apply_bitpos = bitpos;
40cb04f1 1620 }
c67a1cf6 1621 }
1b1838b6 1622 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1623 {
1b1838b6 1624 expr = component_ref_for_mem_expr (t2);
998d7deb 1625 if (host_integerp (off_tree, 1))
6f1087be
RH
1626 {
1627 offset = GEN_INT (tree_low_cst (off_tree, 1));
1628 apply_bitpos = bitpos;
1629 }
998d7deb
RH
1630 /* ??? Any reason the field size would be different than
1631 the size we got from the type? */
1632 }
c67a1cf6 1633 else if (flag_argument_noalias > 1
1b096a0a 1634 && (INDIRECT_REF_P (t2))
1b1838b6 1635 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
c67a1cf6 1636 {
1b1838b6 1637 expr = t2;
c67a1cf6
RH
1638 offset = NULL;
1639 }
1640 }
1641
1642 /* If this is a Fortran indirect argument reference, record the
1643 parameter decl. */
1644 else if (flag_argument_noalias > 1
1b096a0a 1645 && (INDIRECT_REF_P (t))
c67a1cf6
RH
1646 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1647 {
1648 expr = t;
1649 offset = NULL;
998d7deb 1650 }
8ac61af7
RK
1651 }
1652
15c812e3 1653 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1654 bit position offset. Similarly, increase the size of the accessed
1655 object to contain the negative offset. */
6f1087be 1656 if (apply_bitpos)
8c317c5f
RH
1657 {
1658 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1659 if (size)
1660 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1661 }
6f1087be 1662
7ccf35ed
DN
1663 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1664 {
 1665 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1666 we're overlapping. */
1667 offset = NULL;
1668 expr = NULL;
1669 }
1670
8ac61af7 1671 /* Now set the attributes we computed above. */
10b76d73 1672 MEM_ATTRS (ref)
998d7deb 1673 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
8ac61af7
RK
1674
1675 /* If this is already known to be a scalar or aggregate, we are done. */
1676 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
738cc472
RK
1677 return;
1678
8ac61af7
RK
1679 /* If it is a reference into an aggregate, this is part of an aggregate.
1680 Otherwise we don't know. */
173b24b9
RK
1681 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1682 || TREE_CODE (t) == ARRAY_RANGE_REF
1683 || TREE_CODE (t) == BIT_FIELD_REF)
1684 MEM_IN_STRUCT_P (ref) = 1;
1685}
1686
6f1087be 1687void
502b8322 1688set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1689{
1690 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1691}
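
/* Illustrative aside (not part of emit-rtl.c): set_mem_attributes is simply
   the BITPOS == 0 case of the worker above.  When BITPOS is nonzero, the bit
   position already folded into the address must be pulled back out of the
   recorded byte OFFSET and added to the recorded SIZE.  A standalone sketch
   of that arithmetic, with invented names and BITS_PER_UNIT taken to be 8:  */

static long
example_apply_bitpos_to_offset (long byte_offset, long apply_bitpos)
{
  /* Mirrors plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT)).  */
  return byte_offset - apply_bitpos / 8;
}

static long
example_apply_bitpos_to_size (long size, long apply_bitpos)
{
  /* Grow the access so it still covers the bytes stepped back over.  */
  return size + apply_bitpos / 8;
}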
1692
a560d4d4
JH
1693/* Set the decl for MEM to DECL. */
1694
1695void
502b8322 1696set_mem_attrs_from_reg (rtx mem, rtx reg)
a560d4d4
JH
1697{
1698 MEM_ATTRS (mem)
1699 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1700 GEN_INT (REG_OFFSET (reg)),
1701 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1702}
1703
173b24b9
RK
1704/* Set the alias set of MEM to SET. */
1705
1706void
502b8322 1707set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
173b24b9 1708{
68252e27 1709#ifdef ENABLE_CHECKING
173b24b9 1710 /* If the new and old alias sets don't conflict, something is wrong. */
5b0264cb 1711 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
173b24b9
RK
1712#endif
1713
998d7deb 1714 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1715 MEM_SIZE (mem), MEM_ALIGN (mem),
1716 GET_MODE (mem));
173b24b9 1717}
738cc472 1718
d022d93e 1719/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1720
1721void
502b8322 1722set_mem_align (rtx mem, unsigned int align)
738cc472 1723{
998d7deb 1724 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1725 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1726 GET_MODE (mem));
738cc472 1727}
1285011e 1728
998d7deb 1729/* Set the expr for MEM to EXPR. */
1285011e
RK
1730
1731void
502b8322 1732set_mem_expr (rtx mem, tree expr)
1285011e
RK
1733{
1734 MEM_ATTRS (mem)
998d7deb 1735 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1736 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1737}
998d7deb
RH
1738
1739/* Set the offset of MEM to OFFSET. */
1740
1741void
502b8322 1742set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1743{
1744 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1745 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1746 GET_MODE (mem));
35aff10b
AM
1747}
1748
1749/* Set the size of MEM to SIZE. */
1750
1751void
502b8322 1752set_mem_size (rtx mem, rtx size)
35aff10b
AM
1753{
1754 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1755 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1756 GET_MODE (mem));
998d7deb 1757}
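
/* Illustrative aside (not part of emit-rtl.c): MEM attributes live in a
   shared, hashed record, which is why every setter above rebuilds the whole
   record through get_mem_attrs instead of poking one field.  A standalone
   analogue of that read-modify-rebuild pattern, with invented types:  */

struct example_attrs { int alias; int align; int size; };

static struct example_attrs
example_set_align (struct example_attrs old, int align)
{
  struct example_attrs fresh = old;  /* copy every unchanged field */
  fresh.align = align;               /* replace only the one field */
  return fresh;                      /* GCC would hash-cons the result */
}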
173b24b9 1758\f
738cc472
RK
1759/* Return a memory reference like MEMREF, but with its mode changed to MODE
1760 and its address changed to ADDR. (VOIDmode means don't change the mode.
1761 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1762 returned memory location is required to be valid. The memory
1763 attributes are not changed. */
23b2ce53 1764
738cc472 1765static rtx
502b8322 1766change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53
RS
1767{
1768 rtx new;
1769
5b0264cb 1770 gcc_assert (MEM_P (memref));
23b2ce53
RS
1771 if (mode == VOIDmode)
1772 mode = GET_MODE (memref);
1773 if (addr == 0)
1774 addr = XEXP (memref, 0);
a74ff877
JH
1775 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1776 && (!validate || memory_address_p (mode, addr)))
1777 return memref;
23b2ce53 1778
f1ec5147 1779 if (validate)
23b2ce53 1780 {
f1ec5147 1781 if (reload_in_progress || reload_completed)
5b0264cb 1782 gcc_assert (memory_address_p (mode, addr));
f1ec5147
RK
1783 else
1784 addr = memory_address (mode, addr);
23b2ce53 1785 }
750c9258 1786
9b04c6a8
RK
1787 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1788 return memref;
1789
3b80f6ca 1790 new = gen_rtx_MEM (mode, addr);
c6df88cb 1791 MEM_COPY_ATTRIBUTES (new, memref);
23b2ce53
RS
1792 return new;
1793}
792760b9 1794
738cc472
RK
1795/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1796 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
1797
1798rtx
502b8322 1799change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 1800{
4e44c1ef 1801 rtx new = change_address_1 (memref, mode, addr, 1), size;
738cc472 1802 enum machine_mode mmode = GET_MODE (new);
4e44c1ef
JJ
1803 unsigned int align;
1804
1805 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1806 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
c2f7bcc3 1807
fdb1c7b3
JH
1808 /* If there are no changes, just return the original memory reference. */
1809 if (new == memref)
4e44c1ef
JJ
1810 {
1811 if (MEM_ATTRS (memref) == 0
1812 || (MEM_EXPR (memref) == NULL
1813 && MEM_OFFSET (memref) == NULL
1814 && MEM_SIZE (memref) == size
1815 && MEM_ALIGN (memref) == align))
1816 return new;
1817
64fc7c00 1818 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
4e44c1ef
JJ
1819 MEM_COPY_ATTRIBUTES (new, memref);
1820 }
fdb1c7b3 1821
738cc472 1822 MEM_ATTRS (new)
4e44c1ef 1823 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
823e3574 1824
738cc472 1825 return new;
f4ef873c 1826}
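
/* Illustrative aside (not part of emit-rtl.c): for a fixed-width mode the
   new size and alignment follow from the mode itself; only BLKmode leaves
   them unknown.  A standalone sketch of that rule, with plain ints standing
   in for GET_MODE_SIZE / GET_MODE_ALIGNMENT:  */

static void
example_mode_defaults (int is_blkmode, int mode_size, int mode_align,
                       int *size, int *align)
{
  *size = is_blkmode ? 0 : mode_size;    /* 0 means "size unknown" */
  *align = is_blkmode ? 8 : mode_align;  /* BITS_PER_UNIT fallback */
}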
792760b9 1827
738cc472
RK
1828/* Return a memory reference like MEMREF, but with its mode changed
1829 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
1830 nonzero, the memory address is forced to be valid.
1831 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1832 and caller is responsible for adjusting MEMREF base register. */
f1ec5147
RK
1833
1834rtx
502b8322
AJ
1835adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1836 int validate, int adjust)
f1ec5147 1837{
823e3574 1838 rtx addr = XEXP (memref, 0);
738cc472
RK
1839 rtx new;
1840 rtx memoffset = MEM_OFFSET (memref);
10b76d73 1841 rtx size = 0;
738cc472 1842 unsigned int memalign = MEM_ALIGN (memref);
823e3574 1843
fdb1c7b3
JH
1844 /* If there are no changes, just return the original memory reference. */
1845 if (mode == GET_MODE (memref) && !offset
1846 && (!validate || memory_address_p (mode, addr)))
1847 return memref;
1848
d14419e4 1849 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 1850 This may happen even if offset is nonzero -- consider
d14419e4
RH
1851 (plus (plus reg reg) const_int) -- so do this always. */
1852 addr = copy_rtx (addr);
1853
4a78c787
RH
1854 if (adjust)
1855 {
1856 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1857 object, we can merge it into the LO_SUM. */
1858 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1859 && offset >= 0
1860 && (unsigned HOST_WIDE_INT) offset
1861 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1862 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1863 plus_constant (XEXP (addr, 1), offset));
1864 else
1865 addr = plus_constant (addr, offset);
1866 }
823e3574 1867
738cc472
RK
1868 new = change_address_1 (memref, mode, addr, validate);
1869
1870 /* Compute the new values of the memory attributes due to this adjustment.
1871 We add the offsets and update the alignment. */
1872 if (memoffset)
1873 memoffset = GEN_INT (offset + INTVAL (memoffset));
1874
03bf2c23
RK
1875 /* Compute the new alignment by taking the MIN of the alignment and the
1876 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 1877 is zero. */
1878 if (offset != 0)
3bf1e984
RK
1879 memalign
1880 = MIN (memalign,
1881 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
738cc472 1882
10b76d73 1883 /* We can compute the size in a number of ways. */
a06ef755
RK
1884 if (GET_MODE (new) != BLKmode)
1885 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
10b76d73
RK
1886 else if (MEM_SIZE (memref))
1887 size = plus_constant (MEM_SIZE (memref), -offset);
1888
998d7deb 1889 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
10b76d73 1890 memoffset, size, memalign, GET_MODE (new));
738cc472
RK
1891
1892 /* At some point, we should validate that this offset is within the object,
1893 if all the appropriate values are known. */
1894 return new;
f1ec5147
RK
1895}
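
/* Illustrative aside (not part of emit-rtl.c): the alignment update above
   uses the two's-complement identity that OFFSET & -OFFSET isolates the
   lowest set bit, i.e. the largest power of two dividing OFFSET.  A
   standalone demonstration, with BITS_PER_UNIT taken to be 8:  */

static unsigned long
example_align_after_offset (long offset, unsigned long old_align_bits)
{
  unsigned long factor = (unsigned long) (offset & -offset);
  unsigned long off_align_bits = factor * 8;
  return off_align_bits < old_align_bits ? off_align_bits : old_align_bits;
}

/* E.g. offsetting a 64-bit-aligned MEM by 12 == 0b1100 bytes leaves at most
   4-byte alignment: example_align_after_offset (12, 64) == 32.  */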
1896
630036c6
JJ
1897/* Return a memory reference like MEMREF, but with its mode changed
1898 to MODE and its address changed to ADDR, which is assumed to be
 1899 MEMREF offset by OFFSET bytes. If VALIDATE is
1900 nonzero, the memory address is forced to be valid. */
1901
1902rtx
502b8322
AJ
1903adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1904 HOST_WIDE_INT offset, int validate)
630036c6
JJ
1905{
1906 memref = change_address_1 (memref, VOIDmode, addr, validate);
1907 return adjust_address_1 (memref, mode, offset, validate, 0);
1908}
1909
8ac61af7
RK
1910/* Return a memory reference like MEMREF, but whose address is changed by
1911 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1912 known to be in OFFSET (possibly 1). */
0d4903b8
RK
1913
1914rtx
502b8322 1915offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 1916{
e3c8ea67
RH
1917 rtx new, addr = XEXP (memref, 0);
1918
1919 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1920
68252e27 1921 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 1922 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
1923
1924 However, if we did go and rearrange things, we can wind up not
1925 being able to recognize the magic around pic_offset_table_rtx.
1926 This stuff is fragile, and is yet another example of why it is
1927 bad to expose PIC machinery too early. */
1928 if (! memory_address_p (GET_MODE (memref), new)
1929 && GET_CODE (addr) == PLUS
1930 && XEXP (addr, 0) == pic_offset_table_rtx)
1931 {
1932 addr = force_reg (GET_MODE (addr), addr);
1933 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1934 }
1935
f6041ed8 1936 update_temp_slot_address (XEXP (memref, 0), new);
e3c8ea67 1937 new = change_address_1 (memref, VOIDmode, new, 1);
0d4903b8 1938
fdb1c7b3
JH
1939 /* If there are no changes, just return the original memory reference. */
1940 if (new == memref)
1941 return new;
1942
0d4903b8
RK
1943 /* Update the alignment to reflect the offset. Reset the offset, which
1944 we don't know. */
2cc2d4bb
RK
1945 MEM_ATTRS (new)
1946 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
9ceca302 1947 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2cc2d4bb 1948 GET_MODE (new));
0d4903b8
RK
1949 return new;
1950}
68252e27 1951
792760b9
RK
1952/* Return a memory reference like MEMREF, but with its address changed to
1953 ADDR. The caller is asserting that the actual piece of memory pointed
1954 to is the same, just the form of the address is being changed, such as
1955 by putting something into a register. */
1956
1957rtx
502b8322 1958replace_equiv_address (rtx memref, rtx addr)
792760b9 1959{
738cc472
RK
1960 /* change_address_1 copies the memory attribute structure without change
1961 and that's exactly what we want here. */
40c0668b 1962 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 1963 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 1964}
738cc472 1965
f1ec5147
RK
1966/* Likewise, but the reference is not required to be valid. */
1967
1968rtx
502b8322 1969replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 1970{
f1ec5147
RK
1971 return change_address_1 (memref, VOIDmode, addr, 0);
1972}
e7dfe4bb
RH
1973
1974/* Return a memory reference like MEMREF, but with its mode widened to
1975 MODE and offset by OFFSET. This would be used by targets that e.g.
1976 cannot issue QImode memory operations and have to use SImode memory
1977 operations plus masking logic. */
1978
1979rtx
502b8322 1980widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb
RH
1981{
1982 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1983 tree expr = MEM_EXPR (new);
1984 rtx memoffset = MEM_OFFSET (new);
1985 unsigned int size = GET_MODE_SIZE (mode);
1986
fdb1c7b3
JH
1987 /* If there are no changes, just return the original memory reference. */
1988 if (new == memref)
1989 return new;
1990
e7dfe4bb
RH
1991 /* If we don't know what offset we were at within the expression, then
1992 we can't know if we've overstepped the bounds. */
fa1591cb 1993 if (! memoffset)
e7dfe4bb
RH
1994 expr = NULL_TREE;
1995
1996 while (expr)
1997 {
1998 if (TREE_CODE (expr) == COMPONENT_REF)
1999 {
2000 tree field = TREE_OPERAND (expr, 1);
44de5aeb 2001 tree offset = component_ref_field_offset (expr);
e7dfe4bb
RH
2002
2003 if (! DECL_SIZE_UNIT (field))
2004 {
2005 expr = NULL_TREE;
2006 break;
2007 }
2008
2009 /* Is the field at least as large as the access? If so, ok,
2010 otherwise strip back to the containing structure. */
03667700
RK
2011 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2012 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
e7dfe4bb
RH
2013 && INTVAL (memoffset) >= 0)
2014 break;
2015
44de5aeb 2016 if (! host_integerp (offset, 1))
e7dfe4bb
RH
2017 {
2018 expr = NULL_TREE;
2019 break;
2020 }
2021
2022 expr = TREE_OPERAND (expr, 0);
44de5aeb
RK
2023 memoffset
2024 = (GEN_INT (INTVAL (memoffset)
2025 + tree_low_cst (offset, 1)
2026 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2027 / BITS_PER_UNIT)));
e7dfe4bb
RH
2028 }
2029 /* Similarly for the decl. */
2030 else if (DECL_P (expr)
2031 && DECL_SIZE_UNIT (expr)
45f79783 2032 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
e7dfe4bb
RH
2033 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2034 && (! memoffset || INTVAL (memoffset) >= 0))
2035 break;
2036 else
2037 {
2038 /* The widened memory access overflows the expression, which means
2039 that it could alias another expression. Zap it. */
2040 expr = NULL_TREE;
2041 break;
2042 }
2043 }
2044
2045 if (! expr)
2046 memoffset = NULL_RTX;
2047
2048 /* The widened memory may alias other stuff, so zap the alias set. */
2049 /* ??? Maybe use get_alias_set on any remaining expression. */
2050
2051 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2052 MEM_ALIGN (new), mode);
2053
2054 return new;
2055}
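
/* Illustrative aside (not part of emit-rtl.c): a hedged usage sketch.  On a
   target without byte loads, a QImode reference `byte_mem' would be widened
   and then masked, roughly:

     rtx word_mem = widen_memory_access (byte_mem, SImode, 0);
     ... emit the SImode load, then shift and mask out the wanted byte ...

   The loop above keeps only what can still be proven: as soon as the wider
   access may overstep the containing field or decl, MEM_EXPR and MEM_OFFSET
   are dropped, and the alias set is always cleared.  */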
23b2ce53
RS
2056\f
2057/* Return a newly created CODE_LABEL rtx with a unique label number. */
2058
2059rtx
502b8322 2060gen_label_rtx (void)
23b2ce53 2061{
0dc36574 2062 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2063 NULL, label_num++, NULL);
23b2ce53
RS
2064}
2065\f
2066/* For procedure integration. */
2067
23b2ce53 2068/* Install new pointers to the first and last insns in the chain.
86fe05e0 2069 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2070 Used for an inline-procedure after copying the insn chain. */
2071
2072void
502b8322 2073set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2074{
86fe05e0
RK
2075 rtx insn;
2076
23b2ce53
RS
2077 first_insn = first;
2078 last_insn = last;
86fe05e0
RK
2079 cur_insn_uid = 0;
2080
2081 for (insn = first; insn; insn = NEXT_INSN (insn))
2082 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2083
2084 cur_insn_uid++;
23b2ce53 2085}
23b2ce53 2086\f
750c9258 2087/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2088 structure. This routine should only be called once. */
23b2ce53 2089
fd743bc1
PB
2090static void
2091unshare_all_rtl_1 (tree fndecl, rtx insn)
23b2ce53 2092{
d1b81779 2093 tree decl;
23b2ce53 2094
d1b81779
GK
2095 /* Make sure that virtual parameters are not shared. */
2096 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
19e7881c 2097 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
d1b81779 2098
5c6df058
AO
2099 /* Make sure that virtual stack slots are not shared. */
2100 unshare_all_decls (DECL_INITIAL (fndecl));
2101
d1b81779 2102 /* Unshare just about everything else. */
2c07f13b 2103 unshare_all_rtl_in_chain (insn);
750c9258 2104
23b2ce53
RS
2105 /* Make sure the addresses of stack slots found outside the insn chain
2106 (such as, in DECL_RTL of a variable) are not shared
2107 with the insn chain.
2108
2109 This special care is necessary when the stack slot MEM does not
2110 actually appear in the insn chain. If it does appear, its address
2111 is unshared from all else at that point. */
242b0ce6 2112 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2113}
2114
750c9258 2115/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2116 structure, again. This is a fairly expensive thing to do so it
2117 should be done sparingly. */
2118
2119void
502b8322 2120unshare_all_rtl_again (rtx insn)
d1b81779
GK
2121{
2122 rtx p;
624c87aa
RE
2123 tree decl;
2124
d1b81779 2125 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2126 if (INSN_P (p))
d1b81779
GK
2127 {
2128 reset_used_flags (PATTERN (p));
2129 reset_used_flags (REG_NOTES (p));
2130 reset_used_flags (LOG_LINKS (p));
2131 }
624c87aa 2132
2d4aecb3
AO
2133 /* Make sure that virtual stack slots are not shared. */
2134 reset_used_decls (DECL_INITIAL (cfun->decl));
2135
624c87aa
RE
2136 /* Make sure that virtual parameters are not shared. */
2137 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2138 reset_used_flags (DECL_RTL (decl));
2139
2140 reset_used_flags (stack_slot_list);
2141
fd743bc1
PB
2142 unshare_all_rtl_1 (cfun->decl, insn);
2143}
2144
2145void
2146unshare_all_rtl (void)
2147{
2148 unshare_all_rtl_1 (current_function_decl, get_insns ());
d1b81779
GK
2149}
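
/* Illustrative aside (not part of emit-rtl.c): unsharing is a two-phase
   protocol over the `used' flag.  Phase one clears the flag everywhere with
   reset_used_flags; phase two walks with copy_rtx_if_shared, which marks
   each rtx and copies any rtx it meets a second time.  A hedged sketch of
   the protocol on a single pattern:

     reset_used_flags (PATTERN (insn));                      -- phase 1
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));   -- phase 2

   Skipping phase one would make stale marks from an earlier pass look like
   sharing and force needless copies.  */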
2150
ef330312
PB
2151struct tree_opt_pass pass_unshare_all_rtl =
2152{
defb77dc 2153 "unshare", /* name */
ef330312
PB
2154 NULL, /* gate */
2155 unshare_all_rtl, /* execute */
2156 NULL, /* sub */
2157 NULL, /* next */
2158 0, /* static_pass_number */
2159 0, /* tv_id */
2160 0, /* properties_required */
2161 0, /* properties_provided */
2162 0, /* properties_destroyed */
2163 0, /* todo_flags_start */
defb77dc 2164 TODO_dump_func, /* todo_flags_finish */
ef330312
PB
2165 0 /* letter */
2166};
2167
2168
2c07f13b
JH
2169/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2170 Recursively does the same for subexpressions. */
2171
2172static void
2173verify_rtx_sharing (rtx orig, rtx insn)
2174{
2175 rtx x = orig;
2176 int i;
2177 enum rtx_code code;
2178 const char *format_ptr;
2179
2180 if (x == 0)
2181 return;
2182
2183 code = GET_CODE (x);
2184
2185 /* These types may be freely shared. */
2186
2187 switch (code)
2188 {
2189 case REG:
2c07f13b
JH
2190 case CONST_INT:
2191 case CONST_DOUBLE:
2192 case CONST_VECTOR:
2193 case SYMBOL_REF:
2194 case LABEL_REF:
2195 case CODE_LABEL:
2196 case PC:
2197 case CC0:
2198 case SCRATCH:
2c07f13b 2199 return;
3e89ed8d
JH
 2200 /* SCRATCHes must be shared because they represent distinct values. */
2201 case CLOBBER:
2202 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2203 return;
2204 break;
2c07f13b
JH
2205
2206 case CONST:
2207 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2208 a LABEL_REF, it isn't sharable. */
2209 if (GET_CODE (XEXP (x, 0)) == PLUS
2210 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2211 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2212 return;
2213 break;
2214
2215 case MEM:
2216 /* A MEM is allowed to be shared if its address is constant. */
2217 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2218 || reload_completed || reload_in_progress)
2219 return;
2220
2221 break;
2222
2223 default:
2224 break;
2225 }
2226
2227 /* This rtx may not be shared. If it has already been seen,
2228 replace it with a copy of itself. */
1a2caa7a 2229#ifdef ENABLE_CHECKING
2c07f13b
JH
2230 if (RTX_FLAG (x, used))
2231 {
ab532386 2232 error ("invalid rtl sharing found in the insn");
2c07f13b 2233 debug_rtx (insn);
ab532386 2234 error ("shared rtx");
2c07f13b 2235 debug_rtx (x);
ab532386 2236 internal_error ("internal consistency failure");
2c07f13b 2237 }
1a2caa7a
NS
2238#endif
2239 gcc_assert (!RTX_FLAG (x, used));
2240
2c07f13b
JH
2241 RTX_FLAG (x, used) = 1;
2242
6614fd40 2243 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2244
2245 format_ptr = GET_RTX_FORMAT (code);
2246
2247 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2248 {
2249 switch (*format_ptr++)
2250 {
2251 case 'e':
2252 verify_rtx_sharing (XEXP (x, i), insn);
2253 break;
2254
2255 case 'E':
2256 if (XVEC (x, i) != NULL)
2257 {
2258 int j;
2259 int len = XVECLEN (x, i);
2260
2261 for (j = 0; j < len; j++)
2262 {
1a2caa7a
NS
2263 /* We allow sharing of ASM_OPERANDS inside single
2264 instruction. */
2c07f13b 2265 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2266 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2267 == ASM_OPERANDS))
2c07f13b
JH
2268 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2269 else
2270 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2271 }
2272 }
2273 break;
2274 }
2275 }
2276 return;
2277}
2278
ba228239 2279/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2280 sharing in between the subexpressions. */
2281
2282void
2283verify_rtl_sharing (void)
2284{
2285 rtx p;
2286
2287 for (p = get_insns (); p; p = NEXT_INSN (p))
2288 if (INSN_P (p))
2289 {
2290 reset_used_flags (PATTERN (p));
2291 reset_used_flags (REG_NOTES (p));
2292 reset_used_flags (LOG_LINKS (p));
2293 }
2294
2295 for (p = get_insns (); p; p = NEXT_INSN (p))
2296 if (INSN_P (p))
2297 {
2298 verify_rtx_sharing (PATTERN (p), p);
2299 verify_rtx_sharing (REG_NOTES (p), p);
2300 verify_rtx_sharing (LOG_LINKS (p), p);
2301 }
2302}
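
/* Illustrative aside (not part of emit-rtl.c): the verifier above is the
   same clear-then-mark discipline used read-only.  A standalone analogue
   that detects sharing in a binary DAG via a visited flag:  */

struct example_node { struct example_node *kid[2]; int used; };

static void
example_clear (struct example_node *n)
{
  if (n == 0)
    return;
  n->used = 0;
  example_clear (n->kid[0]);
  example_clear (n->kid[1]);
}

/* Return 1 if some node is reachable along two different paths.  */
static int
example_shared_p (struct example_node *n)
{
  if (n == 0)
    return 0;
  if (n->used)
    return 1;
  n->used = 1;
  return example_shared_p (n->kid[0]) || example_shared_p (n->kid[1]);
}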
2303
d1b81779
GK
2304/* Go through all the RTL insn bodies and copy any invalid shared structure.
2305 Assumes the mark bits are cleared at entry. */
2306
2c07f13b
JH
2307void
2308unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2309{
2310 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2311 if (INSN_P (insn))
d1b81779
GK
2312 {
2313 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2314 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2315 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2316 }
2317}
2318
5c6df058
AO
2319/* Go through all virtual stack slots of a function and copy any
2320 shared structure. */
2321static void
502b8322 2322unshare_all_decls (tree blk)
5c6df058
AO
2323{
2324 tree t;
2325
2326 /* Copy shared decls. */
2327 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2328 if (DECL_RTL_SET_P (t))
2329 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
5c6df058
AO
2330
2331 /* Now process sub-blocks. */
2332 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2333 unshare_all_decls (t);
2334}
2335
2d4aecb3 2336/* Go through all virtual stack slots of a function and mark them as
30f7a378 2337 not shared. */
2d4aecb3 2338static void
502b8322 2339reset_used_decls (tree blk)
2d4aecb3
AO
2340{
2341 tree t;
2342
2343 /* Mark decls. */
2344 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2345 if (DECL_RTL_SET_P (t))
2346 reset_used_flags (DECL_RTL (t));
2d4aecb3
AO
2347
2348 /* Now process sub-blocks. */
2349 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2350 reset_used_decls (t);
2351}
2352
23b2ce53 2353/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2354 Recursively does the same for subexpressions. Uses
2355 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2356
2357rtx
502b8322 2358copy_rtx_if_shared (rtx orig)
23b2ce53 2359{
32b32b16
AP
2360 copy_rtx_if_shared_1 (&orig);
2361 return orig;
2362}
2363
ff954f39
AP
2364/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2365 use. Recursively does the same for subexpressions. */
2366
32b32b16
AP
2367static void
2368copy_rtx_if_shared_1 (rtx *orig1)
2369{
2370 rtx x;
b3694847
SS
2371 int i;
2372 enum rtx_code code;
32b32b16 2373 rtx *last_ptr;
b3694847 2374 const char *format_ptr;
23b2ce53 2375 int copied = 0;
32b32b16
AP
2376 int length;
2377
2378 /* Repeat is used to turn tail-recursion into iteration. */
2379repeat:
2380 x = *orig1;
23b2ce53
RS
2381
2382 if (x == 0)
32b32b16 2383 return;
23b2ce53
RS
2384
2385 code = GET_CODE (x);
2386
2387 /* These types may be freely shared. */
2388
2389 switch (code)
2390 {
2391 case REG:
23b2ce53
RS
2392 case CONST_INT:
2393 case CONST_DOUBLE:
69ef87e2 2394 case CONST_VECTOR:
23b2ce53 2395 case SYMBOL_REF:
2c07f13b 2396 case LABEL_REF:
23b2ce53
RS
2397 case CODE_LABEL:
2398 case PC:
2399 case CC0:
2400 case SCRATCH:
0f41302f 2401 /* SCRATCHes must be shared because they represent distinct values. */
32b32b16 2402 return;
3e89ed8d
JH
2403 case CLOBBER:
2404 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2405 return;
2406 break;
23b2ce53 2407
b851ea09
RK
2408 case CONST:
2409 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2410 a LABEL_REF, it isn't sharable. */
2411 if (GET_CODE (XEXP (x, 0)) == PLUS
2412 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2413 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
32b32b16 2414 return;
b851ea09
RK
2415 break;
2416
23b2ce53
RS
2417 case INSN:
2418 case JUMP_INSN:
2419 case CALL_INSN:
2420 case NOTE:
23b2ce53
RS
2421 case BARRIER:
2422 /* The chain of insns is not being copied. */
32b32b16 2423 return;
23b2ce53 2424
e9a25f70
JL
2425 default:
2426 break;
23b2ce53
RS
2427 }
2428
2429 /* This rtx may not be shared. If it has already been seen,
2430 replace it with a copy of itself. */
2431
2adc7f12 2432 if (RTX_FLAG (x, used))
23b2ce53 2433 {
aacd3885 2434 x = shallow_copy_rtx (x);
23b2ce53
RS
2435 copied = 1;
2436 }
2adc7f12 2437 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2438
2439 /* Now scan the subexpressions recursively.
2440 We can store any replaced subexpressions directly into X
2441 since we know X is not shared! Any vectors in X
2442 must be copied if X was copied. */
2443
2444 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2445 length = GET_RTX_LENGTH (code);
2446 last_ptr = NULL;
2447
2448 for (i = 0; i < length; i++)
23b2ce53
RS
2449 {
2450 switch (*format_ptr++)
2451 {
2452 case 'e':
32b32b16
AP
2453 if (last_ptr)
2454 copy_rtx_if_shared_1 (last_ptr);
2455 last_ptr = &XEXP (x, i);
23b2ce53
RS
2456 break;
2457
2458 case 'E':
2459 if (XVEC (x, i) != NULL)
2460 {
b3694847 2461 int j;
f0722107 2462 int len = XVECLEN (x, i);
32b32b16 2463
6614fd40
KH
2464 /* Copy the vector iff I copied the rtx and the length
2465 is nonzero. */
f0722107 2466 if (copied && len > 0)
8f985ec4 2467 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
32b32b16 2468
5d3cc252 2469 /* Call recursively on all inside the vector. */
f0722107 2470 for (j = 0; j < len; j++)
32b32b16
AP
2471 {
2472 if (last_ptr)
2473 copy_rtx_if_shared_1 (last_ptr);
2474 last_ptr = &XVECEXP (x, i, j);
2475 }
23b2ce53
RS
2476 }
2477 break;
2478 }
2479 }
32b32b16
AP
2480 *orig1 = x;
2481 if (last_ptr)
2482 {
2483 orig1 = last_ptr;
2484 goto repeat;
2485 }
2486 return;
23b2ce53
RS
2487}
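
/* Illustrative aside (not part of emit-rtl.c): copy_rtx_if_shared_1 turns
   the recursion on the *last* operand into iteration.  Each operand except
   the most recent one is recursed into immediately; a pointer to the most
   recent one is deferred, and at the end it is handled by jumping back to
   `repeat'.  The same trick on a simple tree type, with invented names:  */

struct example_tree
{
  int n_kids;
  struct example_tree **kid;
  int mark;
};

static void
example_mark_all (struct example_tree **slot)
{
 repeat:
  {
    struct example_tree *t = *slot;
    struct example_tree **last = 0;
    int i;

    if (t == 0 || t->mark)
      return;
    t->mark = 1;

    for (i = 0; i < t->n_kids; i++)
      {
        if (last)
          example_mark_all (last);  /* recurse on the deferred child */
        last = &t->kid[i];          /* defer the newest child */
      }
    if (last)
      {
        slot = last;                /* the tail call becomes a jump */
        goto repeat;
      }
  }
}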
2488
2489/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2490 to look for shared sub-parts. */
2491
2492void
502b8322 2493reset_used_flags (rtx x)
23b2ce53 2494{
b3694847
SS
2495 int i, j;
2496 enum rtx_code code;
2497 const char *format_ptr;
32b32b16 2498 int length;
23b2ce53 2499
32b32b16
AP
2500 /* Repeat is used to turn tail-recursion into iteration. */
2501repeat:
23b2ce53
RS
2502 if (x == 0)
2503 return;
2504
2505 code = GET_CODE (x);
2506
9faa82d8 2507 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2508 for them. */
2509
2510 switch (code)
2511 {
2512 case REG:
23b2ce53
RS
2513 case CONST_INT:
2514 case CONST_DOUBLE:
69ef87e2 2515 case CONST_VECTOR:
23b2ce53
RS
2516 case SYMBOL_REF:
2517 case CODE_LABEL:
2518 case PC:
2519 case CC0:
2520 return;
2521
2522 case INSN:
2523 case JUMP_INSN:
2524 case CALL_INSN:
2525 case NOTE:
2526 case LABEL_REF:
2527 case BARRIER:
2528 /* The chain of insns is not being copied. */
2529 return;
750c9258 2530
e9a25f70
JL
2531 default:
2532 break;
23b2ce53
RS
2533 }
2534
2adc7f12 2535 RTX_FLAG (x, used) = 0;
23b2ce53
RS
2536
2537 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2538 length = GET_RTX_LENGTH (code);
2539
2540 for (i = 0; i < length; i++)
23b2ce53
RS
2541 {
2542 switch (*format_ptr++)
2543 {
2544 case 'e':
32b32b16
AP
2545 if (i == length-1)
2546 {
2547 x = XEXP (x, i);
2548 goto repeat;
2549 }
23b2ce53
RS
2550 reset_used_flags (XEXP (x, i));
2551 break;
2552
2553 case 'E':
2554 for (j = 0; j < XVECLEN (x, i); j++)
2555 reset_used_flags (XVECEXP (x, i, j));
2556 break;
2557 }
2558 }
2559}
2c07f13b
JH
2560
2561/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2562 to look for shared sub-parts. */
2563
2564void
2565set_used_flags (rtx x)
2566{
2567 int i, j;
2568 enum rtx_code code;
2569 const char *format_ptr;
2570
2571 if (x == 0)
2572 return;
2573
2574 code = GET_CODE (x);
2575
2576 /* These types may be freely shared so we needn't do any resetting
2577 for them. */
2578
2579 switch (code)
2580 {
2581 case REG:
2c07f13b
JH
2582 case CONST_INT:
2583 case CONST_DOUBLE:
2584 case CONST_VECTOR:
2585 case SYMBOL_REF:
2586 case CODE_LABEL:
2587 case PC:
2588 case CC0:
2589 return;
2590
2591 case INSN:
2592 case JUMP_INSN:
2593 case CALL_INSN:
2594 case NOTE:
2595 case LABEL_REF:
2596 case BARRIER:
2597 /* The chain of insns is not being copied. */
2598 return;
2599
2600 default:
2601 break;
2602 }
2603
2604 RTX_FLAG (x, used) = 1;
2605
2606 format_ptr = GET_RTX_FORMAT (code);
2607 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2608 {
2609 switch (*format_ptr++)
2610 {
2611 case 'e':
2612 set_used_flags (XEXP (x, i));
2613 break;
2614
2615 case 'E':
2616 for (j = 0; j < XVECLEN (x, i); j++)
2617 set_used_flags (XVECEXP (x, i, j));
2618 break;
2619 }
2620 }
2621}
23b2ce53
RS
2622\f
2623/* Copy X if necessary so that it won't be altered by changes in OTHER.
2624 Return X or the rtx for the pseudo reg the value of X was copied into.
2625 OTHER must be valid as a SET_DEST. */
2626
2627rtx
502b8322 2628make_safe_from (rtx x, rtx other)
23b2ce53
RS
2629{
2630 while (1)
2631 switch (GET_CODE (other))
2632 {
2633 case SUBREG:
2634 other = SUBREG_REG (other);
2635 break;
2636 case STRICT_LOW_PART:
2637 case SIGN_EXTEND:
2638 case ZERO_EXTEND:
2639 other = XEXP (other, 0);
2640 break;
2641 default:
2642 goto done;
2643 }
2644 done:
3c0cb5de 2645 if ((MEM_P (other)
23b2ce53 2646 && ! CONSTANT_P (x)
f8cfc6aa 2647 && !REG_P (x)
23b2ce53 2648 && GET_CODE (x) != SUBREG)
f8cfc6aa 2649 || (REG_P (other)
23b2ce53
RS
2650 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2651 || reg_mentioned_p (other, x))))
2652 {
2653 rtx temp = gen_reg_rtx (GET_MODE (x));
2654 emit_move_insn (temp, x);
2655 return temp;
2656 }
2657 return x;
2658}
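
/* Illustrative aside (not part of emit-rtl.c): a hedged usage sketch.  A
   caller about to emit stores into `target' that might clobber `x' protects
   the value first:

     x = make_safe_from (x, target);
     ... later stores into `target' can no longer alter `x' ...

   If X was already safe -- a constant, or a pseudo that TARGET does not
   mention -- it comes back unchanged; otherwise it has been copied into a
   fresh pseudo register.  */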
2659\f
2660/* Emission of insns (adding them to the doubly-linked list). */
2661
2662/* Return the first insn of the current sequence or current function. */
2663
2664rtx
502b8322 2665get_insns (void)
23b2ce53
RS
2666{
2667 return first_insn;
2668}
2669
3dec4024
JH
2670/* Specify a new insn as the first in the chain. */
2671
2672void
502b8322 2673set_first_insn (rtx insn)
3dec4024 2674{
5b0264cb 2675 gcc_assert (!PREV_INSN (insn));
3dec4024
JH
2676 first_insn = insn;
2677}
2678
23b2ce53
RS
2679/* Return the last insn emitted in current sequence or current function. */
2680
2681rtx
502b8322 2682get_last_insn (void)
23b2ce53
RS
2683{
2684 return last_insn;
2685}
2686
2687/* Specify a new insn as the last in the chain. */
2688
2689void
502b8322 2690set_last_insn (rtx insn)
23b2ce53 2691{
5b0264cb 2692 gcc_assert (!NEXT_INSN (insn));
23b2ce53
RS
2693 last_insn = insn;
2694}
2695
2696/* Return the last insn emitted, even if it is in a sequence now pushed. */
2697
2698rtx
502b8322 2699get_last_insn_anywhere (void)
23b2ce53
RS
2700{
2701 struct sequence_stack *stack;
2702 if (last_insn)
2703 return last_insn;
49ad7cfa 2704 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2705 if (stack->last != 0)
2706 return stack->last;
2707 return 0;
2708}
2709
2a496e8b
JDA
2710/* Return the first nonnote insn emitted in current sequence or current
2711 function. This routine looks inside SEQUENCEs. */
2712
2713rtx
502b8322 2714get_first_nonnote_insn (void)
2a496e8b 2715{
91373fe8
JDA
2716 rtx insn = first_insn;
2717
2718 if (insn)
2719 {
2720 if (NOTE_P (insn))
2721 for (insn = next_insn (insn);
2722 insn && NOTE_P (insn);
2723 insn = next_insn (insn))
2724 continue;
2725 else
2726 {
2ca202e7 2727 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2728 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2729 insn = XVECEXP (PATTERN (insn), 0, 0);
2730 }
2731 }
2a496e8b
JDA
2732
2733 return insn;
2734}
2735
2736/* Return the last nonnote insn emitted in current sequence or current
2737 function. This routine looks inside SEQUENCEs. */
2738
2739rtx
502b8322 2740get_last_nonnote_insn (void)
2a496e8b 2741{
91373fe8
JDA
2742 rtx insn = last_insn;
2743
2744 if (insn)
2745 {
2746 if (NOTE_P (insn))
2747 for (insn = previous_insn (insn);
2748 insn && NOTE_P (insn);
2749 insn = previous_insn (insn))
2750 continue;
2751 else
2752 {
2ca202e7 2753 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2754 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2755 insn = XVECEXP (PATTERN (insn), 0,
2756 XVECLEN (PATTERN (insn), 0) - 1);
2757 }
2758 }
2a496e8b
JDA
2759
2760 return insn;
2761}
2762
23b2ce53
RS
2763/* Return a number larger than any instruction's uid in this function. */
2764
2765int
502b8322 2766get_max_uid (void)
23b2ce53
RS
2767{
2768 return cur_insn_uid;
2769}
aeeeda03 2770
673b5311
MM
2771/* Renumber instructions so that no instruction UIDs are wasted. */
2772
aeeeda03 2773void
10d22567 2774renumber_insns (void)
aeeeda03
MM
2775{
2776 rtx insn;
aeeeda03 2777
673b5311
MM
2778 /* If we're not supposed to renumber instructions, don't. */
2779 if (!flag_renumber_insns)
2780 return;
2781
aeeeda03
MM
2782 /* If there aren't that many instructions, then it's not really
2783 worth renumbering them. */
673b5311 2784 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
aeeeda03
MM
2785 return;
2786
2787 cur_insn_uid = 1;
2788
2789 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
673b5311 2790 {
10d22567
ZD
2791 if (dump_file)
2792 fprintf (dump_file, "Renumbering insn %d to %d\n",
673b5311
MM
2793 INSN_UID (insn), cur_insn_uid);
2794 INSN_UID (insn) = cur_insn_uid++;
2795 }
aeeeda03 2796}
23b2ce53
RS
2797\f
2798/* Return the next insn. If it is a SEQUENCE, return the first insn
2799 of the sequence. */
2800
2801rtx
502b8322 2802next_insn (rtx insn)
23b2ce53
RS
2803{
2804 if (insn)
2805 {
2806 insn = NEXT_INSN (insn);
4b4bf941 2807 if (insn && NONJUMP_INSN_P (insn)
23b2ce53
RS
2808 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2809 insn = XVECEXP (PATTERN (insn), 0, 0);
2810 }
2811
2812 return insn;
2813}
2814
2815/* Return the previous insn. If it is a SEQUENCE, return the last insn
2816 of the sequence. */
2817
2818rtx
502b8322 2819previous_insn (rtx insn)
23b2ce53
RS
2820{
2821 if (insn)
2822 {
2823 insn = PREV_INSN (insn);
4b4bf941 2824 if (insn && NONJUMP_INSN_P (insn)
23b2ce53
RS
2825 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2826 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2827 }
2828
2829 return insn;
2830}
2831
2832/* Return the next insn after INSN that is not a NOTE. This routine does not
2833 look inside SEQUENCEs. */
2834
2835rtx
502b8322 2836next_nonnote_insn (rtx insn)
23b2ce53
RS
2837{
2838 while (insn)
2839 {
2840 insn = NEXT_INSN (insn);
4b4bf941 2841 if (insn == 0 || !NOTE_P (insn))
23b2ce53
RS
2842 break;
2843 }
2844
2845 return insn;
2846}
2847
2848/* Return the previous insn before INSN that is not a NOTE. This routine does
2849 not look inside SEQUENCEs. */
2850
2851rtx
502b8322 2852prev_nonnote_insn (rtx insn)
23b2ce53
RS
2853{
2854 while (insn)
2855 {
2856 insn = PREV_INSN (insn);
4b4bf941 2857 if (insn == 0 || !NOTE_P (insn))
23b2ce53
RS
2858 break;
2859 }
2860
2861 return insn;
2862}
2863
2864/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2865 or 0, if there is none. This routine does not look inside
0f41302f 2866 SEQUENCEs. */
23b2ce53
RS
2867
2868rtx
502b8322 2869next_real_insn (rtx insn)
23b2ce53
RS
2870{
2871 while (insn)
2872 {
2873 insn = NEXT_INSN (insn);
bb8a619e 2874 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
2875 break;
2876 }
2877
2878 return insn;
2879}
2880
2881/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2882 or 0, if there is none. This routine does not look inside
2883 SEQUENCEs. */
2884
2885rtx
502b8322 2886prev_real_insn (rtx insn)
23b2ce53
RS
2887{
2888 while (insn)
2889 {
2890 insn = PREV_INSN (insn);
bb8a619e 2891 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
2892 break;
2893 }
2894
2895 return insn;
2896}
2897
ee960939
OH
2898/* Return the last CALL_INSN in the current list, or 0 if there is none.
2899 This routine does not look inside SEQUENCEs. */
2900
2901rtx
502b8322 2902last_call_insn (void)
ee960939
OH
2903{
2904 rtx insn;
2905
2906 for (insn = get_last_insn ();
4b4bf941 2907 insn && !CALL_P (insn);
ee960939
OH
2908 insn = PREV_INSN (insn))
2909 ;
2910
2911 return insn;
2912}
2913
23b2ce53
RS
2914/* Find the next insn after INSN that really does something. This routine
2915 does not look inside SEQUENCEs. Until reload has completed, this is the
2916 same as next_real_insn. */
2917
69732dcb 2918int
502b8322 2919active_insn_p (rtx insn)
69732dcb 2920{
4b4bf941
JQ
2921 return (CALL_P (insn) || JUMP_P (insn)
2922 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
2923 && (! reload_completed
2924 || (GET_CODE (PATTERN (insn)) != USE
2925 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
2926}
2927
23b2ce53 2928rtx
502b8322 2929next_active_insn (rtx insn)
23b2ce53
RS
2930{
2931 while (insn)
2932 {
2933 insn = NEXT_INSN (insn);
69732dcb 2934 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2935 break;
2936 }
2937
2938 return insn;
2939}
2940
2941/* Find the last insn before INSN that really does something. This routine
2942 does not look inside SEQUENCEs. Until reload has completed, this is the
2943 same as prev_real_insn. */
2944
2945rtx
502b8322 2946prev_active_insn (rtx insn)
23b2ce53
RS
2947{
2948 while (insn)
2949 {
2950 insn = PREV_INSN (insn);
69732dcb 2951 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2952 break;
2953 }
2954
2955 return insn;
2956}
2957
2958/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2959
2960rtx
502b8322 2961next_label (rtx insn)
23b2ce53
RS
2962{
2963 while (insn)
2964 {
2965 insn = NEXT_INSN (insn);
4b4bf941 2966 if (insn == 0 || LABEL_P (insn))
23b2ce53
RS
2967 break;
2968 }
2969
2970 return insn;
2971}
2972
2973/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2974
2975rtx
502b8322 2976prev_label (rtx insn)
23b2ce53
RS
2977{
2978 while (insn)
2979 {
2980 insn = PREV_INSN (insn);
4b4bf941 2981 if (insn == 0 || LABEL_P (insn))
23b2ce53
RS
2982 break;
2983 }
2984
2985 return insn;
2986}
6c2511d3
RS
2987
2988/* Return the last label to mark the same position as LABEL. Return null
2989 if LABEL itself is null. */
2990
2991rtx
2992skip_consecutive_labels (rtx label)
2993{
2994 rtx insn;
2995
2996 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
2997 if (LABEL_P (insn))
2998 label = insn;
2999
3000 return label;
3001}
23b2ce53
RS
3002\f
3003#ifdef HAVE_cc0
c572e5ba
JVA
3004/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3005 and REG_CC_USER notes so we can find it. */
3006
3007void
502b8322 3008link_cc0_insns (rtx insn)
c572e5ba
JVA
3009{
3010 rtx user = next_nonnote_insn (insn);
3011
4b4bf941 3012 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
c572e5ba
JVA
3013 user = XVECEXP (PATTERN (user), 0, 0);
3014
c5c76735
JL
3015 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3016 REG_NOTES (user));
3b80f6ca 3017 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
c572e5ba
JVA
3018}
3019
23b2ce53
RS
3020/* Return the next insn that uses CC0 after INSN, which is assumed to
3021 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3022 applied to the result of this function should yield INSN).
3023
3024 Normally, this is simply the next insn. However, if a REG_CC_USER note
3025 is present, it contains the insn that uses CC0.
3026
3027 Return 0 if we can't find the insn. */
3028
3029rtx
502b8322 3030next_cc0_user (rtx insn)
23b2ce53 3031{
906c4e36 3032 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3033
3034 if (note)
3035 return XEXP (note, 0);
3036
3037 insn = next_nonnote_insn (insn);
4b4bf941 3038 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
23b2ce53
RS
3039 insn = XVECEXP (PATTERN (insn), 0, 0);
3040
2c3c49de 3041 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3042 return insn;
3043
3044 return 0;
3045}
3046
3047/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3048 note, it is the previous insn. */
3049
3050rtx
502b8322 3051prev_cc0_setter (rtx insn)
23b2ce53 3052{
906c4e36 3053 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3054
3055 if (note)
3056 return XEXP (note, 0);
3057
3058 insn = prev_nonnote_insn (insn);
5b0264cb 3059 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3060
3061 return insn;
3062}
3063#endif
e5bef2e4
HB
3064
3065/* Increment the label uses for all labels present in rtx. */
3066
3067static void
502b8322 3068mark_label_nuses (rtx x)
e5bef2e4 3069{
b3694847
SS
3070 enum rtx_code code;
3071 int i, j;
3072 const char *fmt;
e5bef2e4
HB
3073
3074 code = GET_CODE (x);
7537fc90 3075 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3076 LABEL_NUSES (XEXP (x, 0))++;
3077
3078 fmt = GET_RTX_FORMAT (code);
3079 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3080 {
3081 if (fmt[i] == 'e')
0fb7aeda 3082 mark_label_nuses (XEXP (x, i));
e5bef2e4 3083 else if (fmt[i] == 'E')
0fb7aeda 3084 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3085 mark_label_nuses (XVECEXP (x, i, j));
3086 }
3087}
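
/* Illustrative aside (not part of emit-rtl.c): mark_label_nuses is one of
   several walkers in this file (reset_used_flags, set_used_flags,
   verify_rtx_sharing) built on the same skeleton: fetch the rtx code's
   format string and recurse on 'e' slots (a single rtx) and 'E' slots (a
   vector of rtxes).  The skeleton, stripped to its shape:

     fmt = GET_RTX_FORMAT (GET_CODE (x));
     for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
       if (fmt[i] == 'e')
         walk (XEXP (x, i));
       else if (fmt[i] == 'E')
         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
           walk (XVECEXP (x, i, j));

   where `walk' stands for whichever walker is being defined.  */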
3088
23b2ce53
RS
3089\f
3090/* Try splitting insns that can be split for better scheduling.
3091 PAT is the pattern which might split.
3092 TRIAL is the insn providing PAT.
cc2902df 3093 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3094
3095 If this routine succeeds in splitting, it returns the first or last
11147ebe 3096 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3097 returns TRIAL. If the insn to be returned can be split, it will be. */
3098
3099rtx
502b8322 3100try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3101{
3102 rtx before = PREV_INSN (trial);
3103 rtx after = NEXT_INSN (trial);
23b2ce53
RS
3104 int has_barrier = 0;
3105 rtx tem;
6b24c259
JH
3106 rtx note, seq;
3107 int probability;
599aedd9
RH
3108 rtx insn_last, insn;
3109 int njumps = 0;
6b24c259
JH
3110
3111 if (any_condjump_p (trial)
3112 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3113 split_branch_probability = INTVAL (XEXP (note, 0));
3114 probability = split_branch_probability;
3115
3116 seq = split_insns (pat, trial);
3117
3118 split_branch_probability = -1;
23b2ce53
RS
3119
3120 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3121 We may need to handle this specially. */
4b4bf941 3122 if (after && BARRIER_P (after))
23b2ce53
RS
3123 {
3124 has_barrier = 1;
3125 after = NEXT_INSN (after);
3126 }
3127
599aedd9
RH
3128 if (!seq)
3129 return trial;
3130
3131 /* Avoid infinite loop if any insn of the result matches
3132 the original pattern. */
3133 insn_last = seq;
3134 while (1)
23b2ce53 3135 {
599aedd9
RH
3136 if (INSN_P (insn_last)
3137 && rtx_equal_p (PATTERN (insn_last), pat))
3138 return trial;
3139 if (!NEXT_INSN (insn_last))
3140 break;
3141 insn_last = NEXT_INSN (insn_last);
3142 }
750c9258 3143
599aedd9
RH
3144 /* Mark labels. */
3145 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3146 {
4b4bf941 3147 if (JUMP_P (insn))
599aedd9
RH
3148 {
3149 mark_jump_label (PATTERN (insn), insn, 0);
3150 njumps++;
3151 if (probability != -1
3152 && any_condjump_p (insn)
3153 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3154 {
599aedd9
RH
3155 /* We can preserve the REG_BR_PROB notes only if exactly
3156 one jump is created, otherwise the machine description
3157 is responsible for this step using
3158 split_branch_probability variable. */
5b0264cb 3159 gcc_assert (njumps == 1);
599aedd9
RH
3160 REG_NOTES (insn)
3161 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3162 GEN_INT (probability),
3163 REG_NOTES (insn));
2f937369 3164 }
599aedd9
RH
3165 }
3166 }
3167
3168 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3169 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
4b4bf941 3170 if (CALL_P (trial))
599aedd9
RH
3171 {
3172 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3173 if (CALL_P (insn))
599aedd9 3174 {
f6a1f3f6
RH
3175 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3176 while (*p)
3177 p = &XEXP (*p, 1);
3178 *p = CALL_INSN_FUNCTION_USAGE (trial);
599aedd9
RH
3179 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3180 }
3181 }
4b5e8abe 3182
599aedd9
RH
3183 /* Copy notes, particularly those related to the CFG. */
3184 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3185 {
3186 switch (REG_NOTE_KIND (note))
3187 {
3188 case REG_EH_REGION:
2f937369
DM
3189 insn = insn_last;
3190 while (insn != NULL_RTX)
3191 {
4b4bf941 3192 if (CALL_P (insn)
d3a583b1 3193 || (flag_non_call_exceptions && INSN_P (insn)
599aedd9
RH
3194 && may_trap_p (PATTERN (insn))))
3195 REG_NOTES (insn)
3196 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3197 XEXP (note, 0),
3198 REG_NOTES (insn));
2f937369
DM
3199 insn = PREV_INSN (insn);
3200 }
599aedd9 3201 break;
216183ce 3202
599aedd9
RH
3203 case REG_NORETURN:
3204 case REG_SETJMP:
599aedd9
RH
3205 insn = insn_last;
3206 while (insn != NULL_RTX)
216183ce 3207 {
4b4bf941 3208 if (CALL_P (insn))
599aedd9
RH
3209 REG_NOTES (insn)
3210 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3211 XEXP (note, 0),
3212 REG_NOTES (insn));
3213 insn = PREV_INSN (insn);
216183ce 3214 }
599aedd9 3215 break;
d6e95df8 3216
599aedd9
RH
3217 case REG_NON_LOCAL_GOTO:
3218 insn = insn_last;
3219 while (insn != NULL_RTX)
2f937369 3220 {
4b4bf941 3221 if (JUMP_P (insn))
599aedd9
RH
3222 REG_NOTES (insn)
3223 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3224 XEXP (note, 0),
3225 REG_NOTES (insn));
3226 insn = PREV_INSN (insn);
2f937369 3227 }
599aedd9 3228 break;
e5bef2e4 3229
599aedd9
RH
3230 default:
3231 break;
23b2ce53 3232 }
599aedd9
RH
3233 }
3234
3235 /* If there are LABELS inside the split insns increment the
3236 usage count so we don't delete the label. */
4b4bf941 3237 if (NONJUMP_INSN_P (trial))
599aedd9
RH
3238 {
3239 insn = insn_last;
3240 while (insn != NULL_RTX)
23b2ce53 3241 {
4b4bf941 3242 if (NONJUMP_INSN_P (insn))
599aedd9 3243 mark_label_nuses (PATTERN (insn));
23b2ce53 3244
599aedd9
RH
3245 insn = PREV_INSN (insn);
3246 }
23b2ce53
RS
3247 }
3248
0435312e 3249 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3250
3251 delete_insn (trial);
3252 if (has_barrier)
3253 emit_barrier_after (tem);
3254
3255 /* Recursively call try_split for each new insn created; by the
3256 time control returns here that insn will be fully split, so
3257 set LAST and continue from the insn after the one returned.
3258 We can't use next_active_insn here since AFTER may be a note.
 3259 Ignore deleted insns, which can occur if not optimizing. */
3260 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3261 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3262 tem = try_split (PATTERN (tem), tem, 1);
3263
3264 /* Return either the first or the last insn, depending on which was
3265 requested. */
3266 return last
3267 ? (after ? PREV_INSN (after) : last_insn)
3268 : NEXT_INSN (before);
23b2ce53
RS
3269}
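
/* Illustrative aside (not part of emit-rtl.c): the "avoid infinite loop"
   test above makes splitting a fixed-point computation -- re-splitting stops
   as soon as a splitter reproduces its own input.  A standalone analogue of
   that termination guard:  */

static int
example_rewrite_once (int x)
{
  return x > 100 ? x : x * 2;  /* stand-in for one application of a split */
}

static int
example_rewrite_to_fixpoint (int x)
{
  for (;;)
    {
      int y = example_rewrite_once (x);
      if (y == x)  /* the result matches the input: stop re-splitting */
        return x;
      x = y;
    }
}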
3270\f
3271/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3272 Store PATTERN in the pattern slots. */
23b2ce53
RS
3273
3274rtx
502b8322 3275make_insn_raw (rtx pattern)
23b2ce53 3276{
b3694847 3277 rtx insn;
23b2ce53 3278
1f8f4a0b 3279 insn = rtx_alloc (INSN);
23b2ce53 3280
43127294 3281 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3282 PATTERN (insn) = pattern;
3283 INSN_CODE (insn) = -1;
1632afca
RS
3284 LOG_LINKS (insn) = NULL;
3285 REG_NOTES (insn) = NULL;
0435312e 3286 INSN_LOCATOR (insn) = 0;
ba4f7968 3287 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3288
47984720
NC
3289#ifdef ENABLE_RTL_CHECKING
3290 if (insn
2c3c49de 3291 && INSN_P (insn)
47984720
NC
3292 && (returnjump_p (insn)
3293 || (GET_CODE (insn) == SET
3294 && SET_DEST (insn) == pc_rtx)))
3295 {
d4ee4d25 3296 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3297 debug_rtx (insn);
3298 }
3299#endif
750c9258 3300
23b2ce53
RS
3301 return insn;
3302}
3303
2f937369 3304/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3305
38109dab 3306rtx
502b8322 3307make_jump_insn_raw (rtx pattern)
23b2ce53 3308{
b3694847 3309 rtx insn;
23b2ce53 3310
4b1f5e8c 3311 insn = rtx_alloc (JUMP_INSN);
1632afca 3312 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3313
3314 PATTERN (insn) = pattern;
3315 INSN_CODE (insn) = -1;
1632afca
RS
3316 LOG_LINKS (insn) = NULL;
3317 REG_NOTES (insn) = NULL;
3318 JUMP_LABEL (insn) = NULL;
0435312e 3319 INSN_LOCATOR (insn) = 0;
ba4f7968 3320 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3321
3322 return insn;
3323}
aff507f4 3324
2f937369 3325/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3326
3327static rtx
502b8322 3328make_call_insn_raw (rtx pattern)
aff507f4 3329{
b3694847 3330 rtx insn;
aff507f4
RK
3331
3332 insn = rtx_alloc (CALL_INSN);
3333 INSN_UID (insn) = cur_insn_uid++;
3334
3335 PATTERN (insn) = pattern;
3336 INSN_CODE (insn) = -1;
3337 LOG_LINKS (insn) = NULL;
3338 REG_NOTES (insn) = NULL;
3339 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
0435312e 3340 INSN_LOCATOR (insn) = 0;
ba4f7968 3341 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3342
3343 return insn;
3344}
23b2ce53
RS
3345\f
3346/* Add INSN to the end of the doubly-linked list.
3347 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3348
3349void
502b8322 3350add_insn (rtx insn)
23b2ce53
RS
3351{
3352 PREV_INSN (insn) = last_insn;
3353 NEXT_INSN (insn) = 0;
3354
3355 if (NULL != last_insn)
3356 NEXT_INSN (last_insn) = insn;
3357
3358 if (NULL == first_insn)
3359 first_insn = insn;
3360
3361 last_insn = insn;
3362}
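
/* Illustrative aside (not part of emit-rtl.c): add_insn is a plain append
   to a doubly-linked list with head and tail pointers.  A standalone
   analogue with invented names:  */

struct example_insn { struct example_insn *prev, *next; };

static struct example_insn *example_first, *example_last;

static void
example_add (struct example_insn *insn)
{
  insn->prev = example_last;
  insn->next = 0;
  if (example_last != 0)
    example_last->next = insn;
  if (example_first == 0)
    example_first = insn;
  example_last = insn;
}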
3363
a0ae8e8d
RK
3364/* Add INSN into the doubly-linked list after insn AFTER. This and
3365 the next should be the only functions called to insert an insn once
ba213285 3366 delay slots have been filled since only they know how to update a
a0ae8e8d 3367 SEQUENCE. */
23b2ce53
RS
3368
3369void
502b8322 3370add_insn_after (rtx insn, rtx after)
23b2ce53
RS
3371{
3372 rtx next = NEXT_INSN (after);
3c030e88 3373 basic_block bb;
23b2ce53 3374
5b0264cb 3375 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3376
23b2ce53
RS
3377 NEXT_INSN (insn) = next;
3378 PREV_INSN (insn) = after;
3379
3380 if (next)
3381 {
3382 PREV_INSN (next) = insn;
4b4bf941 3383 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3384 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3385 }
3386 else if (last_insn == after)
3387 last_insn = insn;
3388 else
3389 {
49ad7cfa 3390 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3391 /* Scan all pending sequences too. */
3392 for (; stack; stack = stack->next)
3393 if (after == stack->last)
fef0509b
RK
3394 {
3395 stack->last = insn;
3396 break;
3397 }
a0ae8e8d 3398
5b0264cb 3399 gcc_assert (stack);
23b2ce53
RS
3400 }
3401
4b4bf941
JQ
3402 if (!BARRIER_P (after)
3403 && !BARRIER_P (insn)
3c030e88
JH
3404 && (bb = BLOCK_FOR_INSN (after)))
3405 {
3406 set_block_for_insn (insn, bb);
38c1593d 3407 if (INSN_P (insn))
68252e27 3408 bb->flags |= BB_DIRTY;
3c030e88 3409 /* Should not happen as first in the BB is always
a1f300c0 3410 either NOTE or LABEL. */
a813c111 3411 if (BB_END (bb) == after
3c030e88 3412 /* Avoid clobbering of structure when creating new BB. */
4b4bf941
JQ
3413 && !BARRIER_P (insn)
3414 && (!NOTE_P (insn)
3c030e88 3415 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
a813c111 3416 BB_END (bb) = insn;
3c030e88
JH
3417 }
3418
23b2ce53 3419 NEXT_INSN (after) = insn;
4b4bf941 3420 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3421 {
3422 rtx sequence = PATTERN (after);
3423 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3424 }
3425}
3426
a0ae8e8d
RK
3427/* Add INSN into the doubly-linked list before insn BEFORE. This and
3428 the previous should be the only functions called to insert an insn once
ba213285 3429 delay slots have been filled since only they know how to update a
a0ae8e8d
RK
3430 SEQUENCE. */
3431
3432void
502b8322 3433add_insn_before (rtx insn, rtx before)
a0ae8e8d
RK
3434{
3435 rtx prev = PREV_INSN (before);
3c030e88 3436 basic_block bb;
a0ae8e8d 3437
5b0264cb 3438 gcc_assert (!optimize || !INSN_DELETED_P (before));
ba213285 3439
a0ae8e8d
RK
3440 PREV_INSN (insn) = prev;
3441 NEXT_INSN (insn) = before;
3442
3443 if (prev)
3444 {
3445 NEXT_INSN (prev) = insn;
4b4bf941 3446 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
a0ae8e8d
RK
3447 {
3448 rtx sequence = PATTERN (prev);
3449 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3450 }
3451 }
3452 else if (first_insn == before)
3453 first_insn = insn;
3454 else
3455 {
49ad7cfa 3456 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3457 /* Scan all pending sequences too. */
3458 for (; stack; stack = stack->next)
3459 if (before == stack->first)
fef0509b
RK
3460 {
3461 stack->first = insn;
3462 break;
3463 }
a0ae8e8d 3464
5b0264cb 3465 gcc_assert (stack);
a0ae8e8d
RK
3466 }
3467
4b4bf941
JQ
3468 if (!BARRIER_P (before)
3469 && !BARRIER_P (insn)
3c030e88
JH
3470 && (bb = BLOCK_FOR_INSN (before)))
3471 {
3472 set_block_for_insn (insn, bb);
38c1593d 3473 if (INSN_P (insn))
68252e27 3474 bb->flags |= BB_DIRTY;
5b0264cb 3475 /* This should not happen, as the first insn in the BB is always either a NOTE or
43e05e45 3476 a LABEL. */
5b0264cb
NS
3477 gcc_assert (BB_HEAD (bb) != insn
3478 /* Avoid clobbering of structure when creating new BB. */
3479 || BARRIER_P (insn)
3480 || (NOTE_P (insn)
3481 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3c030e88
JH
3482 }
3483
a0ae8e8d 3484 PREV_INSN (before) = insn;
4b4bf941 3485 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
a0ae8e8d
RK
3486 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3487}
3488
89e99eea
DB
3489/* Remove an insn from its doubly-linked list. This function knows how
3490 to handle sequences. */
3491void
502b8322 3492remove_insn (rtx insn)
89e99eea
DB
3493{
3494 rtx next = NEXT_INSN (insn);
3495 rtx prev = PREV_INSN (insn);
53c17031
JH
3496 basic_block bb;
3497
89e99eea
DB
3498 if (prev)
3499 {
3500 NEXT_INSN (prev) = next;
4b4bf941 3501 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea
DB
3502 {
3503 rtx sequence = PATTERN (prev);
3504 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3505 }
3506 }
3507 else if (first_insn == insn)
3508 first_insn = next;
3509 else
3510 {
49ad7cfa 3511 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3512 /* Scan all pending sequences too. */
3513 for (; stack; stack = stack->next)
3514 if (insn == stack->first)
3515 {
3516 stack->first = next;
3517 break;
3518 }
3519
5b0264cb 3520 gcc_assert (stack);
89e99eea
DB
3521 }
3522
3523 if (next)
3524 {
3525 PREV_INSN (next) = prev;
4b4bf941 3526 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
89e99eea
DB
3527 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3528 }
3529 else if (last_insn == insn)
3530 last_insn = prev;
3531 else
3532 {
49ad7cfa 3533 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3534 /* Scan all pending sequences too. */
3535 for (; stack; stack = stack->next)
3536 if (insn == stack->last)
3537 {
3538 stack->last = prev;
3539 break;
3540 }
3541
5b0264cb 3542 gcc_assert (stack);
89e99eea 3543 }
4b4bf941 3544 if (!BARRIER_P (insn)
53c17031
JH
3545 && (bb = BLOCK_FOR_INSN (insn)))
3546 {
38c1593d 3547 if (INSN_P (insn))
68252e27 3548 bb->flags |= BB_DIRTY;
a813c111 3549 if (BB_HEAD (bb) == insn)
53c17031 3550 {
3bf1e984
RK
3551 /* Never ever delete the basic block note without deleting whole
3552 basic block. */
5b0264cb 3553 gcc_assert (!NOTE_P (insn));
a813c111 3554 BB_HEAD (bb) = next;
53c17031 3555 }
a813c111
SB
3556 if (BB_END (bb) == insn)
3557 BB_END (bb) = prev;
53c17031 3558 }
89e99eea
DB
3559}
3560
ee960939
OH
3561/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3562
3563void
502b8322 3564add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3565{
5b0264cb 3566 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3567
3568 /* Put the register usage information on the CALL. If there is already
3569 some usage information, put ours at the end. */
3570 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3571 {
3572 rtx link;
3573
3574 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3575 link = XEXP (link, 1))
3576 ;
3577
3578 XEXP (link, 1) = call_fusage;
3579 }
3580 else
3581 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3582}
3583
23b2ce53
RS
3584/* Delete all insns made since FROM.
3585 FROM becomes the new last instruction. */
3586
3587void
502b8322 3588delete_insns_since (rtx from)
23b2ce53
RS
3589{
3590 if (from == 0)
3591 first_insn = 0;
3592 else
3593 NEXT_INSN (from) = 0;
3594 last_insn = from;
3595}
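/* A common idiom built on this function (sketch; PAT is a placeholder
   pattern): emit speculatively, then roll the chain back on failure.

     rtx last = get_last_insn ();
     emit_insn (pat);
     if (recog_memoized (get_last_insn ()) < 0)
       delete_insns_since (last);

   get_last_insn and recog_memoized live elsewhere; expanders use this
   shape to discard insns that turn out not to match any pattern.  */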
3596
5dab5552
MS
3597/* This function is deprecated, please use sequences instead.
3598
3599 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
3600 The insns to be moved are those between FROM and TO.
3601 They are moved to a new position after the insn AFTER.
3602 AFTER must not be FROM or TO or any insn in between.
3603
3604 This function does not know about SEQUENCEs and hence should not be
3605 called after delay-slot filling has been done. */
3606
3607void
502b8322 3608reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53
RS
3609{
3610 /* Splice this bunch out of where it is now. */
3611 if (PREV_INSN (from))
3612 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3613 if (NEXT_INSN (to))
3614 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3615 if (last_insn == to)
3616 last_insn = PREV_INSN (from);
3617 if (first_insn == from)
3618 first_insn = NEXT_INSN (to);
3619
3620 /* Make the new neighbors point to it and it to them. */
3621 if (NEXT_INSN (after))
3622 PREV_INSN (NEXT_INSN (after)) = to;
3623
3624 NEXT_INSN (to) = NEXT_INSN (after);
3625 PREV_INSN (from) = after;
3626 NEXT_INSN (after) = from;
3627 if (after == last_insn)
3628 last_insn = to;
3629}
3630
3c030e88
JH
3631/* Same as the function above, but take care to update BB boundaries. */
3632void
502b8322 3633reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
3634{
3635 rtx prev = PREV_INSN (from);
3636 basic_block bb, bb2;
3637
3638 reorder_insns_nobb (from, to, after);
3639
4b4bf941 3640 if (!BARRIER_P (after)
3c030e88
JH
3641 && (bb = BLOCK_FOR_INSN (after)))
3642 {
3643 rtx x;
38c1593d 3644 bb->flags |= BB_DIRTY;
68252e27 3645
4b4bf941 3646 if (!BARRIER_P (from)
3c030e88
JH
3647 && (bb2 = BLOCK_FOR_INSN (from)))
3648 {
a813c111
SB
3649 if (BB_END (bb2) == to)
3650 BB_END (bb2) = prev;
38c1593d 3651 bb2->flags |= BB_DIRTY;
3c030e88
JH
3652 }
3653
a813c111
SB
3654 if (BB_END (bb) == after)
3655 BB_END (bb) = to;
3c030e88
JH
3656
3657 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c
GS
3658 if (!BARRIER_P (x))
3659 set_block_for_insn (x, bb);
3c030e88
JH
3660 }
3661}
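/* Usage sketch (assumed): move the consecutive insns FROM..TO so that
   they follow AFTER, updating BB_END for both blocks involved.

     reorder_insns (from, to, after);

   AFTER must not lie within FROM..TO.  Use reorder_insns_nobb only
   when basic-block information is absent or stale, since it leaves
   BLOCK_FOR_INSN and the block boundaries untouched.  */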
3662
23b2ce53
RS
3663/* Return the line note insn preceding INSN. */
3664
3665static rtx
502b8322 3666find_line_note (rtx insn)
23b2ce53
RS
3667{
3668 if (no_line_numbers)
3669 return 0;
3670
3671 for (; insn; insn = PREV_INSN (insn))
4b4bf941 3672 if (NOTE_P (insn)
0fb7aeda 3673 && NOTE_LINE_NUMBER (insn) >= 0)
23b2ce53
RS
3674 break;
3675
3676 return insn;
3677}
3678
64b59a80 3679/* Remove unnecessary notes from the instruction stream. */
aeeeda03
MM
3680
3681void
502b8322 3682remove_unnecessary_notes (void)
aeeeda03 3683{
542d73ae 3684 rtx eh_stack = NULL_RTX;
aeeeda03
MM
3685 rtx insn;
3686 rtx next;
542d73ae 3687 rtx tmp;
aeeeda03 3688
116eebd6
MM
3689 /* We must not remove the first instruction in the function because
3690 the compiler depends on the first instruction being a note. */
aeeeda03
MM
3691 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3692 {
3693 /* Remember what's next. */
3694 next = NEXT_INSN (insn);
3695
3696 /* We're only interested in notes. */
4b4bf941 3697 if (!NOTE_P (insn))
aeeeda03
MM
3698 continue;
3699
542d73ae 3700 switch (NOTE_LINE_NUMBER (insn))
18c038b9 3701 {
542d73ae
RH
3702 case NOTE_INSN_DELETED:
3703 remove_insn (insn);
3704 break;
3705
3706 case NOTE_INSN_EH_REGION_BEG:
3707 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3708 break;
3709
3710 case NOTE_INSN_EH_REGION_END:
3711 /* Too many end notes. */
5b0264cb 3712 gcc_assert (eh_stack);
542d73ae 3713 /* Mismatched nesting. */
5b0264cb
NS
3714 gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3715 == NOTE_EH_HANDLER (insn));
542d73ae
RH
3716 tmp = eh_stack;
3717 eh_stack = XEXP (eh_stack, 1);
3718 free_INSN_LIST_node (tmp);
3719 break;
3720
3721 case NOTE_INSN_BLOCK_BEG:
542d73ae 3722 case NOTE_INSN_BLOCK_END:
f39e46ba
SB
3723 /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass. */
3724 gcc_unreachable ();
542d73ae 3725
f39e46ba
SB
3726 default:
3727 break;
18c038b9 3728 }
aeeeda03 3729 }
542d73ae 3730
f39e46ba
SB
3731 /* Too many EH_REGION_BEG notes. */
3732 gcc_assert (!eh_stack);
aeeeda03
MM
3733}
3734
ef330312
PB
3735struct tree_opt_pass pass_remove_unnecessary_notes =
3736{
defb77dc 3737 "eunotes", /* name */
ef330312
PB
3738 NULL, /* gate */
3739 remove_unnecessary_notes, /* execute */
3740 NULL, /* sub */
3741 NULL, /* next */
3742 0, /* static_pass_number */
3743 0, /* tv_id */
3744 0, /* properties_required */
3745 0, /* properties_provided */
3746 0, /* properties_destroyed */
3747 0, /* todo_flags_start */
defb77dc 3748 TODO_dump_func, /* todo_flags_finish */
ef330312
PB
3749 0 /* letter */
3750};
3751
23b2ce53 3752\f
2f937369
DM
3753/* Emit insn(s) of given code and pattern
3754 at a specified place within the doubly-linked list.
23b2ce53 3755
2f937369
DM
3756 All of the emit_foo global entry points accept an object
3757 X which is either an insn list or a PATTERN of a single
3758 instruction.
23b2ce53 3759
2f937369
DM
3760 There are thus a few canonical ways to generate code and
3761 emit it at a specific place in the instruction stream. For
3762 example, consider the instruction named SPOT and the fact that
3763 we would like to emit some instructions before SPOT. We might
3764 do it like this:
23b2ce53 3765
2f937369
DM
3766 start_sequence ();
3767 ... emit the new instructions ...
3768 insns_head = get_insns ();
3769 end_sequence ();
23b2ce53 3770
2f937369 3771 emit_insn_before (insns_head, SPOT);
23b2ce53 3772
2f937369
DM
3773 It used to be common to generate SEQUENCE rtl instead, but that
3774 is a relic of the past which no longer occurs. The reason is that
 3775 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
3776 generated would almost certainly die right after it was created. */
23b2ce53 3777
2f937369 3778/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
3779
3780rtx
a7102479 3781emit_insn_before_noloc (rtx x, rtx before)
23b2ce53 3782{
2f937369 3783 rtx last = before;
b3694847 3784 rtx insn;
23b2ce53 3785
5b0264cb 3786 gcc_assert (before);
2f937369
DM
3787
3788 if (x == NULL_RTX)
3789 return last;
3790
3791 switch (GET_CODE (x))
23b2ce53 3792 {
2f937369
DM
3793 case INSN:
3794 case JUMP_INSN:
3795 case CALL_INSN:
3796 case CODE_LABEL:
3797 case BARRIER:
3798 case NOTE:
3799 insn = x;
3800 while (insn)
3801 {
3802 rtx next = NEXT_INSN (insn);
3803 add_insn_before (insn, before);
3804 last = insn;
3805 insn = next;
3806 }
3807 break;
3808
3809#ifdef ENABLE_RTL_CHECKING
3810 case SEQUENCE:
5b0264cb 3811 gcc_unreachable ();
2f937369
DM
3812 break;
3813#endif
3814
3815 default:
3816 last = make_insn_raw (x);
3817 add_insn_before (last, before);
3818 break;
23b2ce53
RS
3819 }
3820
2f937369 3821 return last;
23b2ce53
RS
3822}
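/* Worked form of the canonical pattern described above (SPOT, DEST and
   SRC are placeholders): build insns inside a sequence, then splice the
   whole list in before an existing insn.

     rtx seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, spot);

   The insn-list case above walks the chain one insn at a time, so the
   emitted insns keep their relative order.  */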
3823
2f937369 3824/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
3825 and output it before the instruction BEFORE. */
3826
3827rtx
a7102479 3828emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 3829{
d950dee3 3830 rtx insn, last = NULL_RTX;
aff507f4 3831
5b0264cb 3832 gcc_assert (before);
2f937369
DM
3833
3834 switch (GET_CODE (x))
aff507f4 3835 {
2f937369
DM
3836 case INSN:
3837 case JUMP_INSN:
3838 case CALL_INSN:
3839 case CODE_LABEL:
3840 case BARRIER:
3841 case NOTE:
3842 insn = x;
3843 while (insn)
3844 {
3845 rtx next = NEXT_INSN (insn);
3846 add_insn_before (insn, before);
3847 last = insn;
3848 insn = next;
3849 }
3850 break;
3851
3852#ifdef ENABLE_RTL_CHECKING
3853 case SEQUENCE:
5b0264cb 3854 gcc_unreachable ();
2f937369
DM
3855 break;
3856#endif
3857
3858 default:
3859 last = make_jump_insn_raw (x);
3860 add_insn_before (last, before);
3861 break;
aff507f4
RK
3862 }
3863
2f937369 3864 return last;
23b2ce53
RS
3865}
3866
2f937369 3867/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
3868 and output it before the instruction BEFORE. */
3869
3870rtx
a7102479 3871emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 3872{
d950dee3 3873 rtx last = NULL_RTX, insn;
969d70ca 3874
5b0264cb 3875 gcc_assert (before);
2f937369
DM
3876
3877 switch (GET_CODE (x))
969d70ca 3878 {
2f937369
DM
3879 case INSN:
3880 case JUMP_INSN:
3881 case CALL_INSN:
3882 case CODE_LABEL:
3883 case BARRIER:
3884 case NOTE:
3885 insn = x;
3886 while (insn)
3887 {
3888 rtx next = NEXT_INSN (insn);
3889 add_insn_before (insn, before);
3890 last = insn;
3891 insn = next;
3892 }
3893 break;
3894
3895#ifdef ENABLE_RTL_CHECKING
3896 case SEQUENCE:
5b0264cb 3897 gcc_unreachable ();
2f937369
DM
3898 break;
3899#endif
3900
3901 default:
3902 last = make_call_insn_raw (x);
3903 add_insn_before (last, before);
3904 break;
969d70ca
JH
3905 }
3906
2f937369 3907 return last;
969d70ca
JH
3908}
3909
23b2ce53 3910/* Make an insn of code BARRIER
e881bb1b 3911 and output it before the insn BEFORE. */
23b2ce53
RS
3912
3913rtx
502b8322 3914emit_barrier_before (rtx before)
23b2ce53 3915{
b3694847 3916 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
3917
3918 INSN_UID (insn) = cur_insn_uid++;
3919
a0ae8e8d 3920 add_insn_before (insn, before);
23b2ce53
RS
3921 return insn;
3922}
3923
e881bb1b
RH
3924/* Emit the label LABEL before the insn BEFORE. */
3925
3926rtx
502b8322 3927emit_label_before (rtx label, rtx before)
e881bb1b
RH
3928{
3929 /* This can be called twice for the same label as a result of the
3930 confusion that follows a syntax error! So make it harmless. */
3931 if (INSN_UID (label) == 0)
3932 {
3933 INSN_UID (label) = cur_insn_uid++;
3934 add_insn_before (label, before);
3935 }
3936
3937 return label;
3938}
3939
23b2ce53
RS
3940/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3941
3942rtx
502b8322 3943emit_note_before (int subtype, rtx before)
23b2ce53 3944{
b3694847 3945 rtx note = rtx_alloc (NOTE);
23b2ce53 3946 INSN_UID (note) = cur_insn_uid++;
6773e15f 3947#ifndef USE_MAPPED_LOCATION
23b2ce53 3948 NOTE_SOURCE_FILE (note) = 0;
6773e15f 3949#endif
23b2ce53 3950 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 3951 BLOCK_FOR_INSN (note) = NULL;
23b2ce53 3952
a0ae8e8d 3953 add_insn_before (note, before);
23b2ce53
RS
3954 return note;
3955}
3956\f
2f937369
DM
 3957/* Helper for emit_insn_after; handles lists of instructions
3958 efficiently. */
23b2ce53 3959
502b8322 3960static rtx emit_insn_after_1 (rtx, rtx);
2f937369
DM
3961
3962static rtx
502b8322 3963emit_insn_after_1 (rtx first, rtx after)
23b2ce53 3964{
2f937369
DM
3965 rtx last;
3966 rtx after_after;
3967 basic_block bb;
23b2ce53 3968
4b4bf941 3969 if (!BARRIER_P (after)
2f937369 3970 && (bb = BLOCK_FOR_INSN (after)))
23b2ce53 3971 {
2f937369
DM
3972 bb->flags |= BB_DIRTY;
3973 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 3974 if (!BARRIER_P (last))
2f937369 3975 set_block_for_insn (last, bb);
4b4bf941 3976 if (!BARRIER_P (last))
2f937369 3977 set_block_for_insn (last, bb);
a813c111
SB
3978 if (BB_END (bb) == after)
3979 BB_END (bb) = last;
23b2ce53
RS
3980 }
3981 else
2f937369
DM
3982 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3983 continue;
3984
3985 after_after = NEXT_INSN (after);
3986
3987 NEXT_INSN (after) = first;
3988 PREV_INSN (first) = after;
3989 NEXT_INSN (last) = after_after;
3990 if (after_after)
3991 PREV_INSN (after_after) = last;
3992
3993 if (after == last_insn)
3994 last_insn = last;
3995 return last;
3996}
3997
3998/* Make X be output after the insn AFTER. */
3999
4000rtx
a7102479 4001emit_insn_after_noloc (rtx x, rtx after)
2f937369
DM
4002{
4003 rtx last = after;
4004
5b0264cb 4005 gcc_assert (after);
2f937369
DM
4006
4007 if (x == NULL_RTX)
4008 return last;
4009
4010 switch (GET_CODE (x))
23b2ce53 4011 {
2f937369
DM
4012 case INSN:
4013 case JUMP_INSN:
4014 case CALL_INSN:
4015 case CODE_LABEL:
4016 case BARRIER:
4017 case NOTE:
4018 last = emit_insn_after_1 (x, after);
4019 break;
4020
4021#ifdef ENABLE_RTL_CHECKING
4022 case SEQUENCE:
5b0264cb 4023 gcc_unreachable ();
2f937369
DM
4024 break;
4025#endif
4026
4027 default:
4028 last = make_insn_raw (x);
4029 add_insn_after (last, after);
4030 break;
23b2ce53
RS
4031 }
4032
2f937369 4033 return last;
23b2ce53
RS
4034}
4035
255680cf
RK
4036/* Similar to emit_insn_after, except that line notes are to be inserted so
4037 as to act as if this insn were at FROM. */
4038
4039void
502b8322 4040emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
255680cf
RK
4041{
4042 rtx from_line = find_line_note (from);
4043 rtx after_line = find_line_note (after);
2f937369 4044 rtx insn = emit_insn_after (x, after);
255680cf
RK
4045
4046 if (from_line)
5f2fc772 4047 emit_note_copy_after (from_line, after);
255680cf
RK
4048
4049 if (after_line)
5f2fc772 4050 emit_note_copy_after (after_line, insn);
255680cf
RK
4051}
4052
2f937369 4053/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4054 and output it after the insn AFTER. */
4055
4056rtx
a7102479 4057emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4058{
2f937369 4059 rtx last;
23b2ce53 4060
5b0264cb 4061 gcc_assert (after);
2f937369
DM
4062
4063 switch (GET_CODE (x))
23b2ce53 4064 {
2f937369
DM
4065 case INSN:
4066 case JUMP_INSN:
4067 case CALL_INSN:
4068 case CODE_LABEL:
4069 case BARRIER:
4070 case NOTE:
4071 last = emit_insn_after_1 (x, after);
4072 break;
4073
4074#ifdef ENABLE_RTL_CHECKING
4075 case SEQUENCE:
5b0264cb 4076 gcc_unreachable ();
2f937369
DM
4077 break;
4078#endif
4079
4080 default:
4081 last = make_jump_insn_raw (x);
4082 add_insn_after (last, after);
4083 break;
23b2ce53
RS
4084 }
4085
2f937369
DM
4086 return last;
4087}
4088
4089/* Make an instruction with body X and code CALL_INSN
4090 and output it after the instruction AFTER. */
4091
4092rtx
a7102479 4093emit_call_insn_after_noloc (rtx x, rtx after)
2f937369
DM
4094{
4095 rtx last;
4096
5b0264cb 4097 gcc_assert (after);
2f937369
DM
4098
4099 switch (GET_CODE (x))
4100 {
4101 case INSN:
4102 case JUMP_INSN:
4103 case CALL_INSN:
4104 case CODE_LABEL:
4105 case BARRIER:
4106 case NOTE:
4107 last = emit_insn_after_1 (x, after);
4108 break;
4109
4110#ifdef ENABLE_RTL_CHECKING
4111 case SEQUENCE:
5b0264cb 4112 gcc_unreachable ();
2f937369
DM
4113 break;
4114#endif
4115
4116 default:
4117 last = make_call_insn_raw (x);
4118 add_insn_after (last, after);
4119 break;
4120 }
4121
4122 return last;
23b2ce53
RS
4123}
4124
4125/* Make an insn of code BARRIER
4126 and output it after the insn AFTER. */
4127
4128rtx
502b8322 4129emit_barrier_after (rtx after)
23b2ce53 4130{
b3694847 4131 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4132
4133 INSN_UID (insn) = cur_insn_uid++;
4134
4135 add_insn_after (insn, after);
4136 return insn;
4137}
4138
4139/* Emit the label LABEL after the insn AFTER. */
4140
4141rtx
502b8322 4142emit_label_after (rtx label, rtx after)
23b2ce53
RS
4143{
4144 /* This can be called twice for the same label
4145 as a result of the confusion that follows a syntax error!
4146 So make it harmless. */
4147 if (INSN_UID (label) == 0)
4148 {
4149 INSN_UID (label) = cur_insn_uid++;
4150 add_insn_after (label, after);
4151 }
4152
4153 return label;
4154}
4155
4156/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4157
4158rtx
502b8322 4159emit_note_after (int subtype, rtx after)
23b2ce53 4160{
b3694847 4161 rtx note = rtx_alloc (NOTE);
23b2ce53 4162 INSN_UID (note) = cur_insn_uid++;
6773e15f 4163#ifndef USE_MAPPED_LOCATION
23b2ce53 4164 NOTE_SOURCE_FILE (note) = 0;
6773e15f 4165#endif
23b2ce53 4166 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 4167 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4168 add_insn_after (note, after);
4169 return note;
4170}
4171
5f2fc772 4172/* Emit a copy of note ORIG after the insn AFTER. */
23b2ce53
RS
4173
4174rtx
5f2fc772 4175emit_note_copy_after (rtx orig, rtx after)
23b2ce53 4176{
b3694847 4177 rtx note;
23b2ce53 4178
5f2fc772 4179 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
23b2ce53
RS
4180 {
4181 cur_insn_uid++;
4182 return 0;
4183 }
4184
68252e27 4185 note = rtx_alloc (NOTE);
23b2ce53 4186 INSN_UID (note) = cur_insn_uid++;
5f2fc772
NS
4187 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4188 NOTE_DATA (note) = NOTE_DATA (orig);
ba4f7968 4189 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4190 add_insn_after (note, after);
4191 return note;
4192}
4193\f
a7102479 4194/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4195rtx
502b8322 4196emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4197{
a7102479 4198 rtx last = emit_insn_after_noloc (pattern, after);
0d682900 4199
a7102479 4200 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4201 return last;
4202
2f937369
DM
4203 after = NEXT_INSN (after);
4204 while (1)
4205 {
a7102479 4206 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4207 INSN_LOCATOR (after) = loc;
2f937369
DM
4208 if (after == last)
4209 break;
4210 after = NEXT_INSN (after);
4211 }
0d682900
JH
4212 return last;
4213}
4214
a7102479
JH
4215/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4216rtx
4217emit_insn_after (rtx pattern, rtx after)
4218{
4219 if (INSN_P (after))
4220 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4221 else
4222 return emit_insn_after_noloc (pattern, after);
4223}
4224
4225/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4226rtx
502b8322 4227emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4228{
a7102479 4229 rtx last = emit_jump_insn_after_noloc (pattern, after);
2f937369 4230
a7102479 4231 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4232 return last;
4233
2f937369
DM
4234 after = NEXT_INSN (after);
4235 while (1)
4236 {
a7102479 4237 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4238 INSN_LOCATOR (after) = loc;
2f937369
DM
4239 if (after == last)
4240 break;
4241 after = NEXT_INSN (after);
4242 }
0d682900
JH
4243 return last;
4244}
4245
a7102479
JH
4246/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4247rtx
4248emit_jump_insn_after (rtx pattern, rtx after)
4249{
4250 if (INSN_P (after))
4251 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4252 else
4253 return emit_jump_insn_after_noloc (pattern, after);
4254}
4255
4256/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4257rtx
502b8322 4258emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4259{
a7102479 4260 rtx last = emit_call_insn_after_noloc (pattern, after);
2f937369 4261
a7102479 4262 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4263 return last;
4264
2f937369
DM
4265 after = NEXT_INSN (after);
4266 while (1)
4267 {
a7102479 4268 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4269 INSN_LOCATOR (after) = loc;
2f937369
DM
4270 if (after == last)
4271 break;
4272 after = NEXT_INSN (after);
4273 }
0d682900
JH
4274 return last;
4275}
4276
a7102479
JH
4277/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4278rtx
4279emit_call_insn_after (rtx pattern, rtx after)
4280{
4281 if (INSN_P (after))
4282 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4283 else
4284 return emit_call_insn_after_noloc (pattern, after);
4285}
4286
4287/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4288rtx
502b8322 4289emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0d682900
JH
4290{
4291 rtx first = PREV_INSN (before);
a7102479
JH
4292 rtx last = emit_insn_before_noloc (pattern, before);
4293
4294 if (pattern == NULL_RTX || !loc)
4295 return last;
4296
4297 first = NEXT_INSN (first);
4298 while (1)
4299 {
4300 if (active_insn_p (first) && !INSN_LOCATOR (first))
4301 INSN_LOCATOR (first) = loc;
4302 if (first == last)
4303 break;
4304 first = NEXT_INSN (first);
4305 }
4306 return last;
4307}
4308
4309/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4310rtx
4311emit_insn_before (rtx pattern, rtx before)
4312{
4313 if (INSN_P (before))
4314 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4315 else
4316 return emit_insn_before_noloc (pattern, before);
4317}
4318
 4319/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4320rtx
4321emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4322{
4323 rtx first = PREV_INSN (before);
4324 rtx last = emit_jump_insn_before_noloc (pattern, before);
4325
4326 if (pattern == NULL_RTX)
4327 return last;
4328
4329 first = NEXT_INSN (first);
4330 while (1)
4331 {
4332 if (active_insn_p (first) && !INSN_LOCATOR (first))
4333 INSN_LOCATOR (first) = loc;
4334 if (first == last)
4335 break;
4336 first = NEXT_INSN (first);
4337 }
4338 return last;
4339}
4340
4341/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4342rtx
4343emit_jump_insn_before (rtx pattern, rtx before)
4344{
4345 if (INSN_P (before))
4346 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4347 else
4348 return emit_jump_insn_before_noloc (pattern, before);
4349}
4350
 4351/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4352rtx
4353emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4354{
4355 rtx first = PREV_INSN (before);
4356 rtx last = emit_call_insn_before_noloc (pattern, before);
0d682900 4357
dd3adcf8
DJ
4358 if (pattern == NULL_RTX)
4359 return last;
4360
2f937369
DM
4361 first = NEXT_INSN (first);
4362 while (1)
4363 {
a7102479 4364 if (active_insn_p (first) && !INSN_LOCATOR (first))
0435312e 4365 INSN_LOCATOR (first) = loc;
2f937369
DM
4366 if (first == last)
4367 break;
4368 first = NEXT_INSN (first);
4369 }
0d682900
JH
4370 return last;
4371}
a7102479
JH
4372
 4373/* Like emit_call_insn_before_noloc,
 4374 but set INSN_LOCATOR according to BEFORE. */
4375rtx
4376emit_call_insn_before (rtx pattern, rtx before)
4377{
4378 if (INSN_P (before))
4379 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4380 else
4381 return emit_call_insn_before_noloc (pattern, before);
4382}
0d682900 4383\f
2f937369
DM
4384/* Take X and emit it at the end of the doubly-linked
4385 INSN list.
23b2ce53
RS
4386
4387 Returns the last insn emitted. */
4388
4389rtx
502b8322 4390emit_insn (rtx x)
23b2ce53 4391{
2f937369
DM
4392 rtx last = last_insn;
4393 rtx insn;
23b2ce53 4394
2f937369
DM
4395 if (x == NULL_RTX)
4396 return last;
23b2ce53 4397
2f937369
DM
4398 switch (GET_CODE (x))
4399 {
4400 case INSN:
4401 case JUMP_INSN:
4402 case CALL_INSN:
4403 case CODE_LABEL:
4404 case BARRIER:
4405 case NOTE:
4406 insn = x;
4407 while (insn)
23b2ce53 4408 {
2f937369 4409 rtx next = NEXT_INSN (insn);
23b2ce53 4410 add_insn (insn);
2f937369
DM
4411 last = insn;
4412 insn = next;
23b2ce53 4413 }
2f937369 4414 break;
23b2ce53 4415
2f937369
DM
4416#ifdef ENABLE_RTL_CHECKING
4417 case SEQUENCE:
5b0264cb 4418 gcc_unreachable ();
2f937369
DM
4419 break;
4420#endif
23b2ce53 4421
2f937369
DM
4422 default:
4423 last = make_insn_raw (x);
4424 add_insn (last);
4425 break;
23b2ce53
RS
4426 }
4427
4428 return last;
4429}
4430
2f937369
DM
4431/* Make an insn of code JUMP_INSN with pattern X
4432 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4433
4434rtx
502b8322 4435emit_jump_insn (rtx x)
23b2ce53 4436{
d950dee3 4437 rtx last = NULL_RTX, insn;
23b2ce53 4438
2f937369 4439 switch (GET_CODE (x))
23b2ce53 4440 {
2f937369
DM
4441 case INSN:
4442 case JUMP_INSN:
4443 case CALL_INSN:
4444 case CODE_LABEL:
4445 case BARRIER:
4446 case NOTE:
4447 insn = x;
4448 while (insn)
4449 {
4450 rtx next = NEXT_INSN (insn);
4451 add_insn (insn);
4452 last = insn;
4453 insn = next;
4454 }
4455 break;
e0a5c5eb 4456
2f937369
DM
4457#ifdef ENABLE_RTL_CHECKING
4458 case SEQUENCE:
5b0264cb 4459 gcc_unreachable ();
2f937369
DM
4460 break;
4461#endif
e0a5c5eb 4462
2f937369
DM
4463 default:
4464 last = make_jump_insn_raw (x);
4465 add_insn (last);
4466 break;
3c030e88 4467 }
e0a5c5eb
RS
4468
4469 return last;
4470}
4471
2f937369 4472/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4473 and add it to the end of the doubly-linked list. */
4474
4475rtx
502b8322 4476emit_call_insn (rtx x)
23b2ce53 4477{
2f937369
DM
4478 rtx insn;
4479
4480 switch (GET_CODE (x))
23b2ce53 4481 {
2f937369
DM
4482 case INSN:
4483 case JUMP_INSN:
4484 case CALL_INSN:
4485 case CODE_LABEL:
4486 case BARRIER:
4487 case NOTE:
4488 insn = emit_insn (x);
4489 break;
23b2ce53 4490
2f937369
DM
4491#ifdef ENABLE_RTL_CHECKING
4492 case SEQUENCE:
5b0264cb 4493 gcc_unreachable ();
2f937369
DM
4494 break;
4495#endif
23b2ce53 4496
2f937369
DM
4497 default:
4498 insn = make_call_insn_raw (x);
23b2ce53 4499 add_insn (insn);
2f937369 4500 break;
23b2ce53 4501 }
2f937369
DM
4502
4503 return insn;
23b2ce53
RS
4504}
4505
4506/* Add the label LABEL to the end of the doubly-linked list. */
4507
4508rtx
502b8322 4509emit_label (rtx label)
23b2ce53
RS
4510{
4511 /* This can be called twice for the same label
4512 as a result of the confusion that follows a syntax error!
4513 So make it harmless. */
4514 if (INSN_UID (label) == 0)
4515 {
4516 INSN_UID (label) = cur_insn_uid++;
4517 add_insn (label);
4518 }
4519 return label;
4520}
4521
4522/* Make an insn of code BARRIER
4523 and add it to the end of the doubly-linked list. */
4524
4525rtx
502b8322 4526emit_barrier (void)
23b2ce53 4527{
b3694847 4528 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4529 INSN_UID (barrier) = cur_insn_uid++;
4530 add_insn (barrier);
4531 return barrier;
4532}
4533
0cea056b
NS
 4534/* Make a line-number NOTE insn for LOCATION and add it to the end
 4535 of the doubly-linked list, but only if line numbers are desired for
 4536 debugging info and LOCATION doesn't match the previous one. */
23b2ce53
RS
4537
4538rtx
0cea056b 4539emit_line_note (location_t location)
23b2ce53 4540{
2e040219 4541 rtx note;
0cea056b 4542
6773e15f
PB
4543#ifdef USE_MAPPED_LOCATION
4544 if (location == last_location)
4545 return NULL_RTX;
4546#else
0cea056b
NS
4547 if (location.file && last_location.file
4548 && !strcmp (location.file, last_location.file)
4549 && location.line == last_location.line)
fd3acbb3 4550 return NULL_RTX;
6773e15f 4551#endif
0cea056b
NS
4552 last_location = location;
4553
23b2ce53 4554 if (no_line_numbers)
fd3acbb3
NS
4555 {
4556 cur_insn_uid++;
4557 return NULL_RTX;
4558 }
23b2ce53 4559
6773e15f
PB
4560#ifdef USE_MAPPED_LOCATION
4561 note = emit_note ((int) location);
4562#else
0cea056b
NS
4563 note = emit_note (location.line);
4564 NOTE_SOURCE_FILE (note) = location.file;
6773e15f 4565#endif
5f2fc772
NS
4566
4567 return note;
4568}
4569
4570/* Emit a copy of note ORIG. */
502b8322 4571
5f2fc772
NS
4572rtx
4573emit_note_copy (rtx orig)
4574{
4575 rtx note;
4576
4577 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4578 {
4579 cur_insn_uid++;
4580 return NULL_RTX;
4581 }
4582
4583 note = rtx_alloc (NOTE);
4584
4585 INSN_UID (note) = cur_insn_uid++;
4586 NOTE_DATA (note) = NOTE_DATA (orig);
4587 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4588 BLOCK_FOR_INSN (note) = NULL;
4589 add_insn (note);
4590
2e040219 4591 return note;
23b2ce53
RS
4592}
4593
2e040219
NS
 4594/* Make an insn of code NOTE with subtype NOTE_NO
4595 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4596
4597rtx
502b8322 4598emit_note (int note_no)
23b2ce53 4599{
b3694847 4600 rtx note;
23b2ce53 4601
23b2ce53
RS
4602 note = rtx_alloc (NOTE);
4603 INSN_UID (note) = cur_insn_uid++;
2e040219 4604 NOTE_LINE_NUMBER (note) = note_no;
dd107e66 4605 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4606 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4607 add_insn (note);
4608 return note;
4609}
4610
23b2ce53 4611/* Cause the next statement to emit a line note even if the line number
0cea056b 4612 has not changed. */
23b2ce53
RS
4613
4614void
502b8322 4615force_next_line_note (void)
23b2ce53 4616{
6773e15f
PB
4617#ifdef USE_MAPPED_LOCATION
4618 last_location = -1;
4619#else
fd3acbb3 4620 last_location.line = -1;
6773e15f 4621#endif
23b2ce53 4622}
87b47c85
AM
4623
4624/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4625 note of this type already exists, remove it first. */
87b47c85 4626
3d238248 4627rtx
502b8322 4628set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4629{
4630 rtx note = find_reg_note (insn, kind, NULL_RTX);
4631
52488da1
JW
4632 switch (kind)
4633 {
4634 case REG_EQUAL:
4635 case REG_EQUIV:
4636 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4637 has multiple sets (some callers assume single_set
4638 means the insn only has one set, when in fact it
4639 means the insn only has one * useful * set). */
4640 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4641 {
5b0264cb 4642 gcc_assert (!note);
52488da1
JW
4643 return NULL_RTX;
4644 }
4645
4646 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4647 It serves no useful purpose and breaks eliminate_regs. */
4648 if (GET_CODE (datum) == ASM_OPERANDS)
4649 return NULL_RTX;
4650 break;
4651
4652 default:
4653 break;
4654 }
3d238248 4655
750c9258 4656 if (note)
3d238248
JJ
4657 {
4658 XEXP (note, 0) = datum;
4659 return note;
4660 }
87b47c85
AM
4661
4662 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3d238248 4663 return REG_NOTES (insn);
87b47c85 4664}
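/* Usage sketch (assumed): record that INSN's single set computes a
   known constant, replacing any stale note of the same kind.

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Because of the screening above, the call may return NULL_RTX instead
   of attaching a note, e.g. for a multiple-set PARALLEL or an
   ASM_OPERANDS datum.  */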
23b2ce53
RS
4665\f
4666/* Return an indication of which type of insn should have X as a body.
4667 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4668
d78db459 4669static enum rtx_code
502b8322 4670classify_insn (rtx x)
23b2ce53 4671{
4b4bf941 4672 if (LABEL_P (x))
23b2ce53
RS
4673 return CODE_LABEL;
4674 if (GET_CODE (x) == CALL)
4675 return CALL_INSN;
4676 if (GET_CODE (x) == RETURN)
4677 return JUMP_INSN;
4678 if (GET_CODE (x) == SET)
4679 {
4680 if (SET_DEST (x) == pc_rtx)
4681 return JUMP_INSN;
4682 else if (GET_CODE (SET_SRC (x)) == CALL)
4683 return CALL_INSN;
4684 else
4685 return INSN;
4686 }
4687 if (GET_CODE (x) == PARALLEL)
4688 {
b3694847 4689 int j;
23b2ce53
RS
4690 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4691 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4692 return CALL_INSN;
4693 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4694 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4695 return JUMP_INSN;
4696 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4697 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4698 return CALL_INSN;
4699 }
4700 return INSN;
4701}
4702
4703/* Emit the rtl pattern X as an appropriate kind of insn.
4704 If X is a label, it is simply added into the insn chain. */
4705
4706rtx
502b8322 4707emit (rtx x)
23b2ce53
RS
4708{
4709 enum rtx_code code = classify_insn (x);
4710
5b0264cb 4711 switch (code)
23b2ce53 4712 {
5b0264cb
NS
4713 case CODE_LABEL:
4714 return emit_label (x);
4715 case INSN:
4716 return emit_insn (x);
4717 case JUMP_INSN:
4718 {
4719 rtx insn = emit_jump_insn (x);
4720 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4721 return emit_barrier ();
4722 return insn;
4723 }
4724 case CALL_INSN:
4725 return emit_call_insn (x);
4726 default:
4727 gcc_unreachable ();
23b2ce53 4728 }
23b2ce53
RS
4729}
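/* Classification sketch (REG, SRC and LABEL are placeholder rtx): the
   same entry point yields different insn codes depending on the shape
   of the pattern.

     emit (gen_rtx_SET (VOIDmode, reg, src));

   produces an ordinary INSN, while

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));

   produces a JUMP_INSN followed by a barrier, since the SET destination
   is pc_rtx and the jump is unconditional.  */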
4730\f
e2500fed 4731/* Space for free sequence stack entries. */
1431042e 4732static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 4733
4dfa0342
RH
4734/* Begin emitting insns to a sequence. If this sequence will contain
4735 something that might cause the compiler to pop arguments to function
4736 calls (because those pops have previously been deferred; see
4737 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4738 before calling this function. That will ensure that the deferred
4739 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
4740
4741void
502b8322 4742start_sequence (void)
23b2ce53
RS
4743{
4744 struct sequence_stack *tem;
4745
e2500fed
GK
4746 if (free_sequence_stack != NULL)
4747 {
4748 tem = free_sequence_stack;
4749 free_sequence_stack = tem->next;
4750 }
4751 else
703ad42b 4752 tem = ggc_alloc (sizeof (struct sequence_stack));
23b2ce53 4753
49ad7cfa 4754 tem->next = seq_stack;
23b2ce53
RS
4755 tem->first = first_insn;
4756 tem->last = last_insn;
4757
49ad7cfa 4758 seq_stack = tem;
23b2ce53
RS
4759
4760 first_insn = 0;
4761 last_insn = 0;
4762}
4763
5c7a310f
MM
4764/* Set up the insn chain starting with FIRST as the current sequence,
4765 saving the previously current one. See the documentation for
4766 start_sequence for more information about how to use this function. */
23b2ce53
RS
4767
4768void
502b8322 4769push_to_sequence (rtx first)
23b2ce53
RS
4770{
4771 rtx last;
4772
4773 start_sequence ();
4774
4775 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4776
4777 first_insn = first;
4778 last_insn = last;
4779}
4780
f15ae3a1
TW
4781/* Set up the outer-level insn chain
4782 as the current sequence, saving the previously current one. */
4783
4784void
502b8322 4785push_topmost_sequence (void)
f15ae3a1 4786{
aefdd5ab 4787 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
4788
4789 start_sequence ();
4790
49ad7cfa 4791 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4792 top = stack;
4793
4794 first_insn = top->first;
4795 last_insn = top->last;
4796}
4797
4798/* After emitting to the outer-level insn chain, update the outer-level
4799 insn chain, and restore the previous saved state. */
4800
4801void
502b8322 4802pop_topmost_sequence (void)
f15ae3a1 4803{
aefdd5ab 4804 struct sequence_stack *stack, *top = NULL;
f15ae3a1 4805
49ad7cfa 4806 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4807 top = stack;
4808
4809 top->first = first_insn;
4810 top->last = last_insn;
4811
4812 end_sequence ();
4813}
4814
23b2ce53
RS
4815/* After emitting to a sequence, restore previous saved state.
4816
5c7a310f 4817 To get the contents of the sequence just made, you must call
2f937369 4818 `get_insns' *before* calling here.
5c7a310f
MM
4819
4820 If the compiler might have deferred popping arguments while
4821 generating this sequence, and this sequence will not be immediately
4822 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 4823 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
4824 pops are inserted into this sequence, and not into some random
4825 location in the instruction stream. See INHIBIT_DEFER_POP for more
4826 information about deferred popping of arguments. */
23b2ce53
RS
4827
4828void
502b8322 4829end_sequence (void)
23b2ce53 4830{
49ad7cfa 4831 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
4832
4833 first_insn = tem->first;
4834 last_insn = tem->last;
49ad7cfa 4835 seq_stack = tem->next;
23b2ce53 4836
e2500fed
GK
4837 memset (tem, 0, sizeof (*tem));
4838 tem->next = free_sequence_stack;
4839 free_sequence_stack = tem;
23b2ce53
RS
4840}
4841
4842/* Return 1 if currently emitting into a sequence. */
4843
4844int
502b8322 4845in_sequence_p (void)
23b2ce53 4846{
49ad7cfa 4847 return seq_stack != 0;
23b2ce53 4848}
23b2ce53 4849\f
59ec66dc
MM
4850/* Put the various virtual registers into REGNO_REG_RTX. */
4851
2bbdec73 4852static void
502b8322 4853init_virtual_regs (struct emit_status *es)
59ec66dc 4854{
49ad7cfa
BS
4855 rtx *ptr = es->x_regno_reg_rtx;
4856 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4857 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4858 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4859 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4860 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4861}
4862
da43a810
BS
4863\f
4864/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4865static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4866static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4867static int copy_insn_n_scratches;
4868
4869/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4870 copied an ASM_OPERANDS.
4871 In that case, it is the original input-operand vector. */
4872static rtvec orig_asm_operands_vector;
4873
4874/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4875 copied an ASM_OPERANDS.
4876 In that case, it is the copied input-operand vector. */
4877static rtvec copy_asm_operands_vector;
4878
4879/* Likewise for the constraints vector. */
4880static rtvec orig_asm_constraints_vector;
4881static rtvec copy_asm_constraints_vector;
4882
4883/* Recursively create a new copy of an rtx for copy_insn.
4884 This function differs from copy_rtx in that it handles SCRATCHes and
4885 ASM_OPERANDs properly.
4886 Normally, this function is not used directly; use copy_insn as front end.
4887 However, you could first copy an insn pattern with copy_insn and then use
4888 this function afterwards to properly copy any REG_NOTEs containing
4889 SCRATCHes. */
4890
4891rtx
502b8322 4892copy_insn_1 (rtx orig)
da43a810 4893{
b3694847
SS
4894 rtx copy;
4895 int i, j;
4896 RTX_CODE code;
4897 const char *format_ptr;
da43a810
BS
4898
4899 code = GET_CODE (orig);
4900
4901 switch (code)
4902 {
4903 case REG:
da43a810
BS
4904 case CONST_INT:
4905 case CONST_DOUBLE:
69ef87e2 4906 case CONST_VECTOR:
da43a810
BS
4907 case SYMBOL_REF:
4908 case CODE_LABEL:
4909 case PC:
4910 case CC0:
da43a810 4911 return orig;
3e89ed8d
JH
4912 case CLOBBER:
4913 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4914 return orig;
4915 break;
da43a810
BS
4916
4917 case SCRATCH:
4918 for (i = 0; i < copy_insn_n_scratches; i++)
4919 if (copy_insn_scratch_in[i] == orig)
4920 return copy_insn_scratch_out[i];
4921 break;
4922
4923 case CONST:
4924 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4925 a LABEL_REF, it isn't sharable. */
4926 if (GET_CODE (XEXP (orig, 0)) == PLUS
4927 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4928 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4929 return orig;
4930 break;
750c9258 4931
da43a810
BS
4932 /* A MEM with a constant address is not sharable. The problem is that
4933 the constant address may need to be reloaded. If the mem is shared,
4934 then reloading one copy of this mem will cause all copies to appear
4935 to have been reloaded. */
4936
4937 default:
4938 break;
4939 }
4940
aacd3885
RS
4941 /* Copy the various flags, fields, and other information. We assume
4942 that all fields need copying, and then clear the fields that should
da43a810
BS
4943 not be copied. That is the sensible default behavior, and forces
4944 us to explicitly document why we are *not* copying a flag. */
aacd3885 4945 copy = shallow_copy_rtx (orig);
da43a810
BS
4946
4947 /* We do not copy the USED flag, which is used as a mark bit during
4948 walks over the RTL. */
2adc7f12 4949 RTX_FLAG (copy, used) = 0;
da43a810
BS
4950
4951 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 4952 if (INSN_P (orig))
da43a810 4953 {
2adc7f12
JJ
4954 RTX_FLAG (copy, jump) = 0;
4955 RTX_FLAG (copy, call) = 0;
4956 RTX_FLAG (copy, frame_related) = 0;
da43a810 4957 }
750c9258 4958
da43a810
BS
4959 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4960
4961 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
4962 switch (*format_ptr++)
4963 {
4964 case 'e':
4965 if (XEXP (orig, i) != NULL)
4966 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4967 break;
da43a810 4968
aacd3885
RS
4969 case 'E':
4970 case 'V':
4971 if (XVEC (orig, i) == orig_asm_constraints_vector)
4972 XVEC (copy, i) = copy_asm_constraints_vector;
4973 else if (XVEC (orig, i) == orig_asm_operands_vector)
4974 XVEC (copy, i) = copy_asm_operands_vector;
4975 else if (XVEC (orig, i) != NULL)
4976 {
4977 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4978 for (j = 0; j < XVECLEN (copy, i); j++)
4979 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4980 }
4981 break;
da43a810 4982
aacd3885
RS
4983 case 't':
4984 case 'w':
4985 case 'i':
4986 case 's':
4987 case 'S':
4988 case 'u':
4989 case '0':
4990 /* These are left unchanged. */
4991 break;
da43a810 4992
aacd3885
RS
4993 default:
4994 gcc_unreachable ();
4995 }
da43a810
BS
4996
4997 if (code == SCRATCH)
4998 {
4999 i = copy_insn_n_scratches++;
5b0264cb 5000 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
5001 copy_insn_scratch_in[i] = orig;
5002 copy_insn_scratch_out[i] = copy;
5003 }
5004 else if (code == ASM_OPERANDS)
5005 {
6462bb43
AO
5006 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5007 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5008 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5009 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
5010 }
5011
5012 return copy;
5013}
5014
5015/* Create a new copy of an rtx.
5016 This function differs from copy_rtx in that it handles SCRATCHes and
5017 ASM_OPERANDs properly.
5018 INSN doesn't really have to be a full INSN; it could be just the
5019 pattern. */
5020rtx
502b8322 5021copy_insn (rtx insn)
da43a810
BS
5022{
5023 copy_insn_n_scratches = 0;
5024 orig_asm_operands_vector = 0;
5025 orig_asm_constraints_vector = 0;
5026 copy_asm_operands_vector = 0;
5027 copy_asm_constraints_vector = 0;
5028 return copy_insn_1 (insn);
5029}
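/* Usage sketch (assumed): duplicate a pattern, then copy a REG_NOTES
   chain with copy_insn_1 so SCRATCHes stay paired with the copies just
   made, as the comment above describes.

     rtx pat = copy_insn (PATTERN (insn));
     rtx notes = copy_insn_1 (REG_NOTES (insn));

   The second call must follow the first with no intervening copy_insn,
   since it relies on the static scratch tables.  */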
59ec66dc 5030
23b2ce53
RS
5031/* Initialize data structures and variables in this file
5032 before generating rtl for each function. */
5033
5034void
502b8322 5035init_emit (void)
23b2ce53 5036{
01d939e8 5037 struct function *f = cfun;
23b2ce53 5038
703ad42b 5039 f->emit = ggc_alloc (sizeof (struct emit_status));
23b2ce53
RS
5040 first_insn = NULL;
5041 last_insn = NULL;
5042 cur_insn_uid = 1;
5043 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
6773e15f 5044 last_location = UNKNOWN_LOCATION;
23b2ce53 5045 first_label_num = label_num;
49ad7cfa 5046 seq_stack = NULL;
23b2ce53 5047
23b2ce53
RS
5048 /* Init the tables that describe all the pseudo regs. */
5049
3502dc9c 5050 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
23b2ce53 5051
49ad7cfa 5052 f->emit->regno_pointer_align
703ad42b
KG
5053 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5054 * sizeof (unsigned char));
86fe05e0 5055
750c9258 5056 regno_reg_rtx
703ad42b 5057 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
0d4903b8 5058
e50126e8 5059 /* Put copies of all the hard registers into regno_reg_rtx. */
6cde4876
JL
5060 memcpy (regno_reg_rtx,
5061 static_regno_reg_rtx,
5062 FIRST_PSEUDO_REGISTER * sizeof (rtx));
e50126e8 5063
23b2ce53 5064 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
49ad7cfa 5065 init_virtual_regs (f->emit);
740ab4a2
RK
5066
5067 /* Indicate that the virtual registers and stack locations are
5068 all pointers. */
3502dc9c
JDA
5069 REG_POINTER (stack_pointer_rtx) = 1;
5070 REG_POINTER (frame_pointer_rtx) = 1;
5071 REG_POINTER (hard_frame_pointer_rtx) = 1;
5072 REG_POINTER (arg_pointer_rtx) = 1;
740ab4a2 5073
3502dc9c
JDA
5074 REG_POINTER (virtual_incoming_args_rtx) = 1;
5075 REG_POINTER (virtual_stack_vars_rtx) = 1;
5076 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5077 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5078 REG_POINTER (virtual_cfa_rtx) = 1;
5e82e7bd 5079
86fe05e0 5080#ifdef STACK_BOUNDARY
bdb429a5
RK
5081 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5082 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5083 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5084 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5085
5086 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5087 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5088 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5089 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5090 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
86fe05e0
RK
5091#endif
5092
5e82e7bd
JVA
5093#ifdef INIT_EXPANDERS
5094 INIT_EXPANDERS;
5095#endif
23b2ce53
RS
5096}
5097
a73b091d 5098/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5099
5100static rtx
a73b091d 5101gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5102{
5103 rtx tem;
5104 rtvec v;
5105 int units, i;
5106 enum machine_mode inner;
5107
5108 units = GET_MODE_NUNITS (mode);
5109 inner = GET_MODE_INNER (mode);
5110
15ed7b52
JG
5111 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5112
69ef87e2
AH
5113 v = rtvec_alloc (units);
5114
a73b091d
JW
5115 /* We need to call this function after we set the scalar const_tiny_rtx
5116 entries. */
5117 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5118
5119 for (i = 0; i < units; ++i)
a73b091d 5120 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5121
a06e3c40 5122 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5123 return tem;
5124}
5125
a06e3c40 5126/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
a73b091d 5127 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5128rtx
502b8322 5129gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5130{
a73b091d
JW
5131 enum machine_mode inner = GET_MODE_INNER (mode);
5132 int nunits = GET_MODE_NUNITS (mode);
5133 rtx x;
a06e3c40
R
5134 int i;
5135
a73b091d
JW
5136 /* Check to see if all of the elements have the same value. */
5137 x = RTVEC_ELT (v, nunits - 1);
5138 for (i = nunits - 2; i >= 0; i--)
5139 if (RTVEC_ELT (v, i) != x)
5140 break;
5141
5142 /* If the values are all the same, check to see if we can use one of the
5143 standard constant vectors. */
5144 if (i == -1)
5145 {
5146 if (x == CONST0_RTX (inner))
5147 return CONST0_RTX (mode);
5148 else if (x == CONST1_RTX (inner))
5149 return CONST1_RTX (mode);
5150 }
5151
5152 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5153}
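/* Sketch (illustrative; X is a placeholder): an all-zero V4SImode
   vector built through this entry point comes back as the shared
   CONST0_RTX object rather than a fresh CONST_VECTOR.

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X equals CONST0_RTX (V4SImode), because SImode's zero is
   const0_rtx and all four elements match it.  */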
5154
23b2ce53
RS
5155/* Create some permanent unique rtl objects shared between all functions.
5156 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5157
5158void
502b8322 5159init_emit_once (int line_numbers)
23b2ce53
RS
5160{
5161 int i;
5162 enum machine_mode mode;
9ec36da5 5163 enum machine_mode double_mode;
23b2ce53 5164
59e4e217 5165 /* We need reg_raw_mode, so initialize the modes now. */
28420116
PB
5166 init_reg_modes_once ();
5167
5692c7bc
ZW
 5168 /* Initialize the CONST_INT, CONST_DOUBLE, mem_attrs, and reg_attrs
 5169 hash tables. */
17211ab5
GK
5170 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5171 const_int_htab_eq, NULL);
173b24b9 5172
17211ab5
GK
5173 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5174 const_double_htab_eq, NULL);
5692c7bc 5175
17211ab5
GK
5176 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5177 mem_attrs_htab_eq, NULL);
a560d4d4
JH
5178 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5179 reg_attrs_htab_eq, NULL);
67673f5c 5180
23b2ce53
RS
5181 no_line_numbers = ! line_numbers;
5182
43fa6302
AS
 5183 /* Compute the byte, word and double modes. */
5184
5185 byte_mode = VOIDmode;
5186 word_mode = VOIDmode;
5187 double_mode = VOIDmode;
5188
15ed7b52
JG
5189 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5190 mode != VOIDmode;
43fa6302
AS
5191 mode = GET_MODE_WIDER_MODE (mode))
5192 {
5193 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5194 && byte_mode == VOIDmode)
5195 byte_mode = mode;
5196
5197 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5198 && word_mode == VOIDmode)
5199 word_mode = mode;
5200 }
5201
15ed7b52
JG
5202 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5203 mode != VOIDmode;
43fa6302
AS
5204 mode = GET_MODE_WIDER_MODE (mode))
5205 {
5206 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5207 && double_mode == VOIDmode)
5208 double_mode = mode;
5209 }
5210
5211 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5212
5da077de
AS
5213 /* Assign register numbers to the globally defined register rtx.
5214 This must be done at runtime because the register number field
5215 is in a union and some compilers can't initialize unions. */
5216
2fb00d7f
KH
5217 pc_rtx = gen_rtx_PC (VOIDmode);
5218 cc0_rtx = gen_rtx_CC0 (VOIDmode);
08394eef
BS
5219 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5220 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5da077de 5221 if (hard_frame_pointer_rtx == 0)
750c9258 5222 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
08394eef 5223 HARD_FRAME_POINTER_REGNUM);
5da077de 5224 if (arg_pointer_rtx == 0)
08394eef 5225 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
750c9258 5226 virtual_incoming_args_rtx =
08394eef 5227 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
750c9258 5228 virtual_stack_vars_rtx =
08394eef 5229 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
750c9258 5230 virtual_stack_dynamic_rtx =
08394eef 5231 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
750c9258
AJ
5232 virtual_outgoing_args_rtx =
5233 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
08394eef 5234 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5da077de 5235
6cde4876
JL
5236 /* Initialize RTL for commonly used hard registers. These are
5237 copied into regno_reg_rtx as we begin to compile each function. */
5238 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5239 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5240
5da077de 5241#ifdef INIT_EXPANDERS
414c4dc4
NC
5242 /* This is to initialize {init|mark|free}_machine_status before the first
5243 call to push_function_context_to. This is needed by the Chill front
a1f300c0 5244 end which calls push_function_context_to before the first call to
5da077de
AS
5245 init_function_start. */
5246 INIT_EXPANDERS;
5247#endif
5248
23b2ce53
RS
5249 /* Create the unique rtx's for certain rtx codes and operand values. */
5250
a2a8cc44 5251 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
c5c76735 5252 tries to use these variables. */
23b2ce53 5253 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
750c9258 5254 const_int_rtx[i + MAX_SAVED_CONST_INT] =
f1b690f1 5255 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
23b2ce53 5256
68d75312
JC
5257 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5258 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5da077de 5259 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
68d75312 5260 else
3b80f6ca 5261 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
23b2ce53 5262
5692c7bc
ZW
5263 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5264 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5265 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
f7657db9
KG
5266 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5267 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5692c7bc 5268 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
03f2ea93
RS
5269 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5270
5271 dconsthalf = dconst1;
1e92bbb9 5272 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
23b2ce53 5273
f7657db9
KG
5274 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5275
ab01a87c
KG
5276 /* Initialize mathematical constants for constant folding builtins.
 5277 These constants need to be given to at least 160 bits of precision. */
5278 real_from_string (&dconstpi,
5279 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5280 real_from_string (&dconste,
5281 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5282

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
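
  /* For illustration, assuming the rtl.h definition of this era: the
     table filled above backs the mode-aware constant macros, e.g.

	#define CONST0_RTX(MODE)  (const_tiny_rtx[0][(int) (MODE)])

     so CONST0_RTX (SFmode) is a CONST_DOUBLE while CONST0_RTX (SImode)
     is the shared CONST_INT zero.  */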

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
\f
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
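
/* Illustrative use only, not taken from this file: a pass duplicating
   the last insn of a basic block BB might write

	rtx copy = emit_copy_of_insn_after (insn, BB_END (bb));

   The copy keeps the original's INSN_CODE, locator, and frame-related
   flag, so later passes can treat it exactly like the insn it shadows.  */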

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER expression of hard register REGNO in mode MODE,
   reusing a previously cached copy if one exists.  */

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
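
/* Illustrative use only: a back end that must clobber, say, a condition
   code hard register around some instruction could emit

	emit_insn (gen_hard_reg_clobber (CCmode, FLAGS_REGNUM));

   where FLAGS_REGNUM stands for a hypothetical target register number.
   Repeated requests return one shared rtx, and GTY((deletable)) lets
   the garbage collector reclaim the whole cache between compilations.  */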

#include "gt-emit-rtl.h"