]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
c-opts.c (c_common_post_options): Don't emit working directory in cpp output if ...
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
affad9a4 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
23b2ce53 4
1322177d 5This file is part of GCC.
23b2ce53 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
23b2ce53 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
23b2ce53
RS
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
23b2ce53
RS
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
f822fcf7
KH
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
23b2ce53
RS
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
0dfa1860 53#include "bitmap.h"
a05924f9 54#include "basic-block.h"
87ff9c8e 55#include "ggc.h"
e1772ac0 56#include "debug.h"
d23c55c2 57#include "langhooks.h"
ca695ac9 58
1d445e9e
ILT
59/* Commonly used modes. */
60
0f41302f
MS
61enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 63enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 64enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 65
23b2ce53
RS
66
67/* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
044b4de3 70static GTY(()) int label_num = 1;
23b2ce53 71
23b2ce53
RS
72/* Highest label number in current function.
73 Zero means use the value of label_num instead.
74 This is nonzero only when belatedly compiling an inline function. */
75
76static int last_label_num;
77
f9bed9d3 78/* Value label_num had when set_new_last_label_num was called.
23b2ce53
RS
79 If label_num has not changed since then, last_label_num is valid. */
80
81static int base_label_num;
82
83/* Nonzero means do not generate NOTEs for source line numbers. */
84
85static int no_line_numbers;
86
87/* Commonly used rtx's, so that we only need space for one copy.
88 These are initialized once for the entire compilation.
5692c7bc
ZW
89 All of these are unique; no other rtx-object will be equal to any
90 of these. */
23b2ce53 91
5da077de 92rtx global_rtl[GR_MAX];
23b2ce53 93
6cde4876
JL
94/* Commonly used RTL for hard registers. These objects are not necessarily
95 unique, so we allocate them separately from global_rtl. They are
96 initialized once per compilation unit, then copied into regno_reg_rtx
97 at the beginning of each function. */
98static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
4de249d9
PB
100rtx (*gen_lowpart) (enum machine_mode mode, rtx x) = gen_lowpart_general;
101
23b2ce53
RS
102/* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
68d75312
JC
108rtx const_true_rtx;
109
23b2ce53
RS
110REAL_VALUE_TYPE dconst0;
111REAL_VALUE_TYPE dconst1;
112REAL_VALUE_TYPE dconst2;
f7657db9
KG
113REAL_VALUE_TYPE dconst3;
114REAL_VALUE_TYPE dconst10;
23b2ce53 115REAL_VALUE_TYPE dconstm1;
03f2ea93
RS
116REAL_VALUE_TYPE dconstm2;
117REAL_VALUE_TYPE dconsthalf;
f7657db9 118REAL_VALUE_TYPE dconstthird;
ab01a87c
KG
119REAL_VALUE_TYPE dconstpi;
120REAL_VALUE_TYPE dconste;
23b2ce53
RS
121
122/* All references to the following fixed hard registers go through
123 these unique rtl objects. On machines where the frame-pointer and
124 arg-pointer are the same register, they use the same unique object.
125
126 After register allocation, other rtl objects which used to be pseudo-regs
127 may be clobbered to refer to the frame-pointer register.
128 But references that were originally to the frame-pointer can be
129 distinguished from the others because they contain frame_pointer_rtx.
130
ac6f08b0
DE
131 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
132 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 133 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
134 register elimination hard_frame_pointer_rtx should always be used.
135 On machines where the two registers are same (most) then these are the
136 same.
137
23b2ce53
RS
138 In an inline procedure, the stack and frame pointer rtxs may not be
139 used for anything else. */
23b2ce53
RS
140rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
141rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
142rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
143
a4417a86
JW
144/* This is used to implement __builtin_return_address for some machines.
145 See for instance the MIPS port. */
146rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
147
23b2ce53
RS
148/* We make one copy of (const_int C) where C is in
149 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
150 to save space during the compilation and simplify comparisons of
151 integers. */
152
5da077de 153rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 154
c13e8210
MM
155/* A hash table storing CONST_INTs whose absolute value is greater
156 than MAX_SAVED_CONST_INT. */
157
e2500fed
GK
158static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_int_htab;
c13e8210 160
173b24b9 161/* A hash table storing memory attribute structures. */
e2500fed
GK
162static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
163 htab_t mem_attrs_htab;
173b24b9 164
a560d4d4
JH
165/* A hash table storing register attribute structures. */
166static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
167 htab_t reg_attrs_htab;
168
5692c7bc 169/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
170static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
171 htab_t const_double_htab;
5692c7bc 172
01d939e8
BS
173#define first_insn (cfun->emit->x_first_insn)
174#define last_insn (cfun->emit->x_last_insn)
175#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
fd3acbb3 176#define last_location (cfun->emit->x_last_location)
01d939e8 177#define first_label_num (cfun->emit->x_first_label_num)
23b2ce53 178
502b8322
AJ
179static rtx make_jump_insn_raw (rtx);
180static rtx make_call_insn_raw (rtx);
181static rtx find_line_note (rtx);
182static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
502b8322
AJ
183static void unshare_all_decls (tree);
184static void reset_used_decls (tree);
185static void mark_label_nuses (rtx);
186static hashval_t const_int_htab_hash (const void *);
187static int const_int_htab_eq (const void *, const void *);
188static hashval_t const_double_htab_hash (const void *);
189static int const_double_htab_eq (const void *, const void *);
190static rtx lookup_const_double (rtx);
191static hashval_t mem_attrs_htab_hash (const void *);
192static int mem_attrs_htab_eq (const void *, const void *);
193static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
194 enum machine_mode);
195static hashval_t reg_attrs_htab_hash (const void *);
196static int reg_attrs_htab_eq (const void *, const void *);
197static reg_attrs *get_reg_attrs (tree, int);
198static tree component_ref_for_mem_expr (tree);
199static rtx gen_const_vector_0 (enum machine_mode);
200static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
32b32b16 201static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 202
6b24c259
JH
203/* Probability of the conditional branch currently proceeded by try_split.
204 Set to -1 otherwise. */
205int split_branch_probability = -1;
ca695ac9 206\f
c13e8210
MM
207/* Returns a hash code for X (which is a really a CONST_INT). */
208
209static hashval_t
502b8322 210const_int_htab_hash (const void *x)
c13e8210 211{
bcda12f4 212 return (hashval_t) INTVAL ((rtx) x);
c13e8210
MM
213}
214
cc2902df 215/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
216 CONST_INT) is the same as that given by Y (which is really a
217 HOST_WIDE_INT *). */
218
219static int
502b8322 220const_int_htab_eq (const void *x, const void *y)
c13e8210 221{
5692c7bc
ZW
222 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
223}
224
225/* Returns a hash code for X (which is really a CONST_DOUBLE). */
226static hashval_t
502b8322 227const_double_htab_hash (const void *x)
5692c7bc 228{
5692c7bc 229 rtx value = (rtx) x;
46b33600 230 hashval_t h;
5692c7bc 231
46b33600
RH
232 if (GET_MODE (value) == VOIDmode)
233 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
234 else
fe352c29 235 {
15c812e3 236 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
237 /* MODE is used in the comparison, so it should be in the hash. */
238 h ^= GET_MODE (value);
239 }
5692c7bc
ZW
240 return h;
241}
242
cc2902df 243/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
244 is the same as that represented by Y (really a ...) */
245static int
502b8322 246const_double_htab_eq (const void *x, const void *y)
5692c7bc
ZW
247{
248 rtx a = (rtx)x, b = (rtx)y;
5692c7bc
ZW
249
250 if (GET_MODE (a) != GET_MODE (b))
251 return 0;
8580f7a0
RH
252 if (GET_MODE (a) == VOIDmode)
253 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
254 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
255 else
256 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
257 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
258}
259
173b24b9
RK
260/* Returns a hash code for X (which is a really a mem_attrs *). */
261
262static hashval_t
502b8322 263mem_attrs_htab_hash (const void *x)
173b24b9
RK
264{
265 mem_attrs *p = (mem_attrs *) x;
266
267 return (p->alias ^ (p->align * 1000)
268 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
269 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
998d7deb 270 ^ (size_t) p->expr);
173b24b9
RK
271}
272
cc2902df 273/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
274 mem_attrs *) is the same as that given by Y (which is also really a
275 mem_attrs *). */
c13e8210
MM
276
277static int
502b8322 278mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 279{
173b24b9
RK
280 mem_attrs *p = (mem_attrs *) x;
281 mem_attrs *q = (mem_attrs *) y;
282
998d7deb 283 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
173b24b9 284 && p->size == q->size && p->align == q->align);
c13e8210
MM
285}
286
173b24b9 287/* Allocate a new mem_attrs structure and insert it into the hash table if
10b76d73
RK
288 one identical to it is not already in the table. We are doing this for
289 MEM of mode MODE. */
173b24b9
RK
290
291static mem_attrs *
502b8322
AJ
292get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
293 unsigned int align, enum machine_mode mode)
173b24b9
RK
294{
295 mem_attrs attrs;
296 void **slot;
297
bb056a77
OH
298 /* If everything is the default, we can just return zero.
299 This must match what the corresponding MEM_* macros return when the
300 field is not present. */
998d7deb 301 if (alias == 0 && expr == 0 && offset == 0
10b76d73
RK
302 && (size == 0
303 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
bb056a77
OH
304 && (STRICT_ALIGNMENT && mode != BLKmode
305 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
10b76d73
RK
306 return 0;
307
173b24b9 308 attrs.alias = alias;
998d7deb 309 attrs.expr = expr;
173b24b9
RK
310 attrs.offset = offset;
311 attrs.size = size;
312 attrs.align = align;
313
314 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
315 if (*slot == 0)
316 {
317 *slot = ggc_alloc (sizeof (mem_attrs));
318 memcpy (*slot, &attrs, sizeof (mem_attrs));
319 }
320
321 return *slot;
c13e8210
MM
322}
323
a560d4d4
JH
324/* Returns a hash code for X (which is a really a reg_attrs *). */
325
326static hashval_t
502b8322 327reg_attrs_htab_hash (const void *x)
a560d4d4
JH
328{
329 reg_attrs *p = (reg_attrs *) x;
330
331 return ((p->offset * 1000) ^ (long) p->decl);
332}
333
6356f892 334/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
335 reg_attrs *) is the same as that given by Y (which is also really a
336 reg_attrs *). */
337
338static int
502b8322 339reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4
JH
340{
341 reg_attrs *p = (reg_attrs *) x;
342 reg_attrs *q = (reg_attrs *) y;
343
344 return (p->decl == q->decl && p->offset == q->offset);
345}
346/* Allocate a new reg_attrs structure and insert it into the hash table if
347 one identical to it is not already in the table. We are doing this for
348 MEM of mode MODE. */
349
350static reg_attrs *
502b8322 351get_reg_attrs (tree decl, int offset)
a560d4d4
JH
352{
353 reg_attrs attrs;
354 void **slot;
355
356 /* If everything is the default, we can just return zero. */
357 if (decl == 0 && offset == 0)
358 return 0;
359
360 attrs.decl = decl;
361 attrs.offset = offset;
362
363 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
364 if (*slot == 0)
365 {
366 *slot = ggc_alloc (sizeof (reg_attrs));
367 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 }
369
370 return *slot;
371}
372
08394eef
BS
373/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
374 don't attempt to share with the various global pieces of rtl (such as
375 frame_pointer_rtx). */
376
377rtx
502b8322 378gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
379{
380 rtx x = gen_rtx_raw_REG (mode, regno);
381 ORIGINAL_REGNO (x) = regno;
382 return x;
383}
384
c5c76735
JL
385/* There are some RTL codes that require special attention; the generation
386 functions do the raw handling. If you add to this list, modify
387 special_rtx in gengenrtl.c as well. */
388
3b80f6ca 389rtx
502b8322 390gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca 391{
c13e8210
MM
392 void **slot;
393
3b80f6ca 394 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 395 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
396
397#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
398 if (const_true_rtx && arg == STORE_FLAG_VALUE)
399 return const_true_rtx;
400#endif
401
c13e8210 402 /* Look up the CONST_INT in the hash table. */
e38992e8
RK
403 slot = htab_find_slot_with_hash (const_int_htab, &arg,
404 (hashval_t) arg, INSERT);
29105cea 405 if (*slot == 0)
1f8f4a0b 406 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210
MM
407
408 return (rtx) *slot;
3b80f6ca
RH
409}
410
2496c7bd 411rtx
502b8322 412gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
413{
414 return GEN_INT (trunc_int_for_mode (c, mode));
415}
416
5692c7bc
ZW
417/* CONST_DOUBLEs might be created from pairs of integers, or from
418 REAL_VALUE_TYPEs. Also, their length is known only at run time,
419 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
420
421/* Determine whether REAL, a CONST_DOUBLE, already exists in the
422 hash table. If so, return its counterpart; otherwise add it
423 to the hash table and return it. */
424static rtx
502b8322 425lookup_const_double (rtx real)
5692c7bc
ZW
426{
427 void **slot = htab_find_slot (const_double_htab, real, INSERT);
428 if (*slot == 0)
429 *slot = real;
430
431 return (rtx) *slot;
432}
29105cea 433
5692c7bc
ZW
434/* Return a CONST_DOUBLE rtx for a floating-point value specified by
435 VALUE in mode MODE. */
0133b7d9 436rtx
502b8322 437const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 438{
5692c7bc
ZW
439 rtx real = rtx_alloc (CONST_DOUBLE);
440 PUT_MODE (real, mode);
441
442 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
443
444 return lookup_const_double (real);
445}
446
447/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
448 of ints: I0 is the low-order word and I1 is the high-order word.
449 Do not use this routine for non-integer modes; convert to
450 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
451
452rtx
502b8322 453immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
5692c7bc
ZW
454{
455 rtx value;
456 unsigned int i;
457
458 if (mode != VOIDmode)
459 {
460 int width;
461 if (GET_MODE_CLASS (mode) != MODE_INT
cb2a532e
AH
462 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
463 /* We can get a 0 for an error mark. */
464 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
465 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
5692c7bc
ZW
466 abort ();
467
468 /* We clear out all bits that don't belong in MODE, unless they and
469 our sign bit are all one. So we get either a reasonable negative
470 value or a reasonable unsigned value for this mode. */
471 width = GET_MODE_BITSIZE (mode);
472 if (width < HOST_BITS_PER_WIDE_INT
473 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
474 != ((HOST_WIDE_INT) (-1) << (width - 1))))
475 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
476 else if (width == HOST_BITS_PER_WIDE_INT
477 && ! (i1 == ~0 && i0 < 0))
478 i1 = 0;
479 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
480 /* We cannot represent this value as a constant. */
481 abort ();
482
483 /* If this would be an entire word for the target, but is not for
484 the host, then sign-extend on the host so that the number will
485 look the same way on the host that it would on the target.
486
487 For example, when building a 64 bit alpha hosted 32 bit sparc
488 targeted compiler, then we want the 32 bit unsigned value -1 to be
489 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
490 The latter confuses the sparc backend. */
491
492 if (width < HOST_BITS_PER_WIDE_INT
493 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
494 i0 |= ((HOST_WIDE_INT) (-1) << width);
2454beaf 495
5692c7bc
ZW
496 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
497 CONST_INT.
2454beaf 498
5692c7bc
ZW
499 ??? Strictly speaking, this is wrong if we create a CONST_INT for
500 a large unsigned constant with the size of MODE being
501 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
502 in a wider mode. In that case we will mis-interpret it as a
503 negative number.
2454beaf 504
5692c7bc
ZW
505 Unfortunately, the only alternative is to make a CONST_DOUBLE for
506 any constant in any mode if it is an unsigned constant larger
507 than the maximum signed integer in an int on the host. However,
508 doing this will break everyone that always expects to see a
509 CONST_INT for SImode and smaller.
510
511 We have always been making CONST_INTs in this case, so nothing
512 new is being broken. */
513
514 if (width <= HOST_BITS_PER_WIDE_INT)
515 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
516 }
517
518 /* If this integer fits in one word, return a CONST_INT. */
519 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
520 return GEN_INT (i0);
521
522 /* We use VOIDmode for integers. */
523 value = rtx_alloc (CONST_DOUBLE);
524 PUT_MODE (value, VOIDmode);
525
526 CONST_DOUBLE_LOW (value) = i0;
527 CONST_DOUBLE_HIGH (value) = i1;
528
529 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
530 XWINT (value, i) = 0;
531
532 return lookup_const_double (value);
0133b7d9
RH
533}
534
3b80f6ca 535rtx
502b8322 536gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3b80f6ca
RH
537{
538 /* In case the MD file explicitly references the frame pointer, have
539 all such references point to the same frame pointer. This is
540 used during frame pointer elimination to distinguish the explicit
541 references to these registers from pseudos that happened to be
542 assigned to them.
543
544 If we have eliminated the frame pointer or arg pointer, we will
545 be using it as a normal register, for example as a spill
546 register. In such cases, we might be accessing it in a mode that
547 is not Pmode and therefore cannot use the pre-allocated rtx.
548
549 Also don't do this when we are making new REGs in reload, since
550 we don't want to get confused with the real pointers. */
551
552 if (mode == Pmode && !reload_in_progress)
553 {
e10c79fe
LB
554 if (regno == FRAME_POINTER_REGNUM
555 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
556 return frame_pointer_rtx;
557#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
e10c79fe
LB
558 if (regno == HARD_FRAME_POINTER_REGNUM
559 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
560 return hard_frame_pointer_rtx;
561#endif
562#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
bcb33994 563 if (regno == ARG_POINTER_REGNUM)
3b80f6ca
RH
564 return arg_pointer_rtx;
565#endif
566#ifdef RETURN_ADDRESS_POINTER_REGNUM
bcb33994 567 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3b80f6ca
RH
568 return return_address_pointer_rtx;
569#endif
fc555370 570 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
2d67bd7b 571 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
68252e27 572 return pic_offset_table_rtx;
bcb33994 573 if (regno == STACK_POINTER_REGNUM)
3b80f6ca
RH
574 return stack_pointer_rtx;
575 }
576
006a94b0 577#if 0
6cde4876 578 /* If the per-function register table has been set up, try to re-use
006a94b0
JL
579 an existing entry in that table to avoid useless generation of RTL.
580
581 This code is disabled for now until we can fix the various backends
582 which depend on having non-shared hard registers in some cases. Long
583 term we want to re-enable this code as it can significantly cut down
e10c79fe
LB
584 on the amount of useless RTL that gets generated.
585
586 We'll also need to fix some code that runs after reload that wants to
587 set ORIGINAL_REGNO. */
588
6cde4876
JL
589 if (cfun
590 && cfun->emit
591 && regno_reg_rtx
592 && regno < FIRST_PSEUDO_REGISTER
593 && reg_raw_mode[regno] == mode)
594 return regno_reg_rtx[regno];
006a94b0 595#endif
6cde4876 596
08394eef 597 return gen_raw_REG (mode, regno);
3b80f6ca
RH
598}
599
41472af8 600rtx
502b8322 601gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
602{
603 rtx rt = gen_rtx_raw_MEM (mode, addr);
604
605 /* This field is not cleared by the mere allocation of the rtx, so
606 we clear it here. */
173b24b9 607 MEM_ATTRS (rt) = 0;
41472af8
MM
608
609 return rt;
610}
ddef6bc7
JJ
611
612rtx
502b8322 613gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
ddef6bc7
JJ
614{
615 /* This is the most common failure type.
616 Catch it early so we can see who does it. */
617 if ((offset % GET_MODE_SIZE (mode)) != 0)
618 abort ();
619
620 /* This check isn't usable right now because combine will
621 throw arbitrary crap like a CALL into a SUBREG in
622 gen_lowpart_for_combine so we must just eat it. */
623#if 0
624 /* Check for this too. */
625 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
626 abort ();
627#endif
5692c7bc 628 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
629}
630
173b24b9
RK
631/* Generate a SUBREG representing the least-significant part of REG if MODE
632 is smaller than mode of REG, otherwise paradoxical SUBREG. */
633
ddef6bc7 634rtx
502b8322 635gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
636{
637 enum machine_mode inmode;
ddef6bc7
JJ
638
639 inmode = GET_MODE (reg);
640 if (inmode == VOIDmode)
641 inmode = mode;
e0e08ac2
JH
642 return gen_rtx_SUBREG (mode, reg,
643 subreg_lowpart_offset (mode, inmode));
ddef6bc7 644}
c5c76735 645\f
23b2ce53
RS
646/* gen_rtvec (n, [rt1, ..., rtn])
647**
648** This routine creates an rtvec and stores within it the
649** pointers to rtx's which are its arguments.
650*/
651
652/*VARARGS1*/
653rtvec
e34d07f2 654gen_rtvec (int n, ...)
23b2ce53 655{
6268b922 656 int i, save_n;
23b2ce53 657 rtx *vector;
e34d07f2 658 va_list p;
23b2ce53 659
e34d07f2 660 va_start (p, n);
23b2ce53
RS
661
662 if (n == 0)
663 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
664
703ad42b 665 vector = alloca (n * sizeof (rtx));
4f90e4a0 666
23b2ce53
RS
667 for (i = 0; i < n; i++)
668 vector[i] = va_arg (p, rtx);
6268b922
KG
669
670 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
671 save_n = n;
e34d07f2 672 va_end (p);
23b2ce53 673
6268b922 674 return gen_rtvec_v (save_n, vector);
23b2ce53
RS
675}
676
677rtvec
502b8322 678gen_rtvec_v (int n, rtx *argp)
23b2ce53 679{
b3694847
SS
680 int i;
681 rtvec rt_val;
23b2ce53
RS
682
683 if (n == 0)
684 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
685
686 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
687
688 for (i = 0; i < n; i++)
8f985ec4 689 rt_val->elem[i] = *argp++;
23b2ce53
RS
690
691 return rt_val;
692}
693\f
694/* Generate a REG rtx for a new pseudo register of mode MODE.
695 This pseudo is assigned the next sequential register number. */
696
697rtx
502b8322 698gen_reg_rtx (enum machine_mode mode)
23b2ce53 699{
01d939e8 700 struct function *f = cfun;
b3694847 701 rtx val;
23b2ce53 702
f1db3576
JL
703 /* Don't let anything called after initial flow analysis create new
704 registers. */
705 if (no_new_pseudos)
23b2ce53
RS
706 abort ();
707
1b3d8f8a
GK
708 if (generating_concat_p
709 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
710 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
fc84e8a8
RS
711 {
712 /* For complex modes, don't make a single pseudo.
713 Instead, make a CONCAT of two pseudos.
714 This allows noncontiguous allocation of the real and imaginary parts,
715 which makes much better code. Besides, allocating DCmode
716 pseudos overstrains reload on some machines like the 386. */
717 rtx realpart, imagpart;
27e58a70 718 enum machine_mode partmode = GET_MODE_INNER (mode);
fc84e8a8
RS
719
720 realpart = gen_reg_rtx (partmode);
721 imagpart = gen_reg_rtx (partmode);
3b80f6ca 722 return gen_rtx_CONCAT (mode, realpart, imagpart);
fc84e8a8
RS
723 }
724
a560d4d4 725 /* Make sure regno_pointer_align, and regno_reg_rtx are large
0d4903b8 726 enough to have an element for this pseudo reg number. */
23b2ce53 727
3502dc9c 728 if (reg_rtx_no == f->emit->regno_pointer_align_length)
23b2ce53 729 {
3502dc9c 730 int old_size = f->emit->regno_pointer_align_length;
e2ecd91c 731 char *new;
0d4903b8 732 rtx *new1;
0d4903b8 733
e2500fed 734 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
49ad7cfa 735 memset (new + old_size, 0, old_size);
f9e158c3 736 f->emit->regno_pointer_align = (unsigned char *) new;
49ad7cfa 737
703ad42b
KG
738 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
739 old_size * 2 * sizeof (rtx));
49ad7cfa 740 memset (new1 + old_size, 0, old_size * sizeof (rtx));
23b2ce53
RS
741 regno_reg_rtx = new1;
742
3502dc9c 743 f->emit->regno_pointer_align_length = old_size * 2;
23b2ce53
RS
744 }
745
08394eef 746 val = gen_raw_REG (mode, reg_rtx_no);
23b2ce53
RS
747 regno_reg_rtx[reg_rtx_no++] = val;
748 return val;
749}
750
dcc24678 751/* Generate a register with same attributes as REG,
a560d4d4
JH
752 but offsetted by OFFSET. */
753
754rtx
502b8322 755gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
a560d4d4
JH
756{
757 rtx new = gen_rtx_REG (mode, regno);
758 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
502b8322 759 REG_OFFSET (reg) + offset);
a560d4d4
JH
760 return new;
761}
762
763/* Set the decl for MEM to DECL. */
764
765void
502b8322 766set_reg_attrs_from_mem (rtx reg, rtx mem)
a560d4d4
JH
767{
768 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
769 REG_ATTRS (reg)
770 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
771}
772
9d18e06b
JZ
773/* Set the register attributes for registers contained in PARM_RTX.
774 Use needed values from memory attributes of MEM. */
775
776void
502b8322 777set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
9d18e06b
JZ
778{
779 if (GET_CODE (parm_rtx) == REG)
780 set_reg_attrs_from_mem (parm_rtx, mem);
781 else if (GET_CODE (parm_rtx) == PARALLEL)
782 {
783 /* Check for a NULL entry in the first slot, used to indicate that the
784 parameter goes both on the stack and in registers. */
785 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
786 for (; i < XVECLEN (parm_rtx, 0); i++)
787 {
788 rtx x = XVECEXP (parm_rtx, 0, i);
789 if (GET_CODE (XEXP (x, 0)) == REG)
790 REG_ATTRS (XEXP (x, 0))
791 = get_reg_attrs (MEM_EXPR (mem),
792 INTVAL (XEXP (x, 1)));
793 }
794 }
795}
796
a560d4d4
JH
797/* Assign the RTX X to declaration T. */
798void
502b8322 799set_decl_rtl (tree t, rtx x)
a560d4d4
JH
800{
801 DECL_CHECK (t)->decl.rtl = x;
802
803 if (!x)
804 return;
4d6922ee 805 /* For register, we maintain the reverse information too. */
a560d4d4
JH
806 if (GET_CODE (x) == REG)
807 REG_ATTRS (x) = get_reg_attrs (t, 0);
808 else if (GET_CODE (x) == SUBREG)
809 REG_ATTRS (SUBREG_REG (x))
810 = get_reg_attrs (t, -SUBREG_BYTE (x));
811 if (GET_CODE (x) == CONCAT)
812 {
813 if (REG_P (XEXP (x, 0)))
814 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
815 if (REG_P (XEXP (x, 1)))
816 REG_ATTRS (XEXP (x, 1))
817 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
818 }
819 if (GET_CODE (x) == PARALLEL)
820 {
821 int i;
822 for (i = 0; i < XVECLEN (x, 0); i++)
823 {
824 rtx y = XVECEXP (x, 0, i);
825 if (REG_P (XEXP (y, 0)))
826 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
827 }
828 }
829}
830
754fdcca
RK
831/* Identify REG (which may be a CONCAT) as a user register. */
832
833void
502b8322 834mark_user_reg (rtx reg)
754fdcca
RK
835{
836 if (GET_CODE (reg) == CONCAT)
837 {
838 REG_USERVAR_P (XEXP (reg, 0)) = 1;
839 REG_USERVAR_P (XEXP (reg, 1)) = 1;
840 }
841 else if (GET_CODE (reg) == REG)
842 REG_USERVAR_P (reg) = 1;
843 else
844 abort ();
845}
846
86fe05e0
RK
847/* Identify REG as a probable pointer register and show its alignment
848 as ALIGN, if nonzero. */
23b2ce53
RS
849
850void
502b8322 851mark_reg_pointer (rtx reg, int align)
23b2ce53 852{
3502dc9c 853 if (! REG_POINTER (reg))
00995e78 854 {
3502dc9c 855 REG_POINTER (reg) = 1;
86fe05e0 856
00995e78
RE
857 if (align)
858 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
859 }
860 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 861 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 862 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
863}
864
865/* Return 1 plus largest pseudo reg number used in the current function. */
866
867int
502b8322 868max_reg_num (void)
23b2ce53
RS
869{
870 return reg_rtx_no;
871}
872
873/* Return 1 + the largest label number used so far in the current function. */
874
875int
502b8322 876max_label_num (void)
23b2ce53
RS
877{
878 if (last_label_num && label_num == base_label_num)
879 return last_label_num;
880 return label_num;
881}
882
883/* Return first label number used in this function (if any were used). */
884
885int
502b8322 886get_first_label_num (void)
23b2ce53
RS
887{
888 return first_label_num;
889}
890\f
ddef6bc7
JJ
891/* Return the final regno of X, which is a SUBREG of a hard
892 register. */
893int
502b8322 894subreg_hard_regno (rtx x, int check_mode)
ddef6bc7
JJ
895{
896 enum machine_mode mode = GET_MODE (x);
897 unsigned int byte_offset, base_regno, final_regno;
898 rtx reg = SUBREG_REG (x);
899
900 /* This is where we attempt to catch illegal subregs
901 created by the compiler. */
902 if (GET_CODE (x) != SUBREG
903 || GET_CODE (reg) != REG)
904 abort ();
905 base_regno = REGNO (reg);
906 if (base_regno >= FIRST_PSEUDO_REGISTER)
907 abort ();
0607953c 908 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
ddef6bc7 909 abort ();
04c5580f
JH
910#ifdef ENABLE_CHECKING
911 if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
502b8322 912 SUBREG_BYTE (x), mode))
04c5580f
JH
913 abort ();
914#endif
ddef6bc7
JJ
915 /* Catch non-congruent offsets too. */
916 byte_offset = SUBREG_BYTE (x);
917 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
918 abort ();
919
920 final_regno = subreg_regno (x);
921
922 return final_regno;
923}
924
23b2ce53
RS
925/* Return a value representing some low-order bits of X, where the number
926 of low-order bits is given by MODE. Note that no conversion is done
750c9258 927 between floating-point and fixed-point values, rather, the bit
23b2ce53
RS
928 representation is returned.
929
930 This function handles the cases in common between gen_lowpart, below,
931 and two variants in cse.c and combine.c. These are the cases that can
932 be safely handled at all points in the compilation.
933
934 If this is not a case we can handle, return 0. */
935
936rtx
502b8322 937gen_lowpart_common (enum machine_mode mode, rtx x)
23b2ce53 938{
ddef6bc7 939 int msize = GET_MODE_SIZE (mode);
550d1387 940 int xsize;
ddef6bc7 941 int offset = 0;
550d1387
GK
942 enum machine_mode innermode;
943
944 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
945 so we have to make one up. Yuk. */
946 innermode = GET_MODE (x);
947 if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
948 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
949 else if (innermode == VOIDmode)
950 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
951
952 xsize = GET_MODE_SIZE (innermode);
953
954 if (innermode == VOIDmode || innermode == BLKmode)
955 abort ();
23b2ce53 956
550d1387 957 if (innermode == mode)
23b2ce53
RS
958 return x;
959
960 /* MODE must occupy no more words than the mode of X. */
550d1387
GK
961 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
962 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
23b2ce53
RS
963 return 0;
964
53501a19 965 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
550d1387 966 if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
53501a19
BS
967 return 0;
968
550d1387 969 offset = subreg_lowpart_offset (mode, innermode);
23b2ce53
RS
970
971 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
83e9c679
RK
972 && (GET_MODE_CLASS (mode) == MODE_INT
973 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
23b2ce53
RS
974 {
975 /* If we are getting the low-order part of something that has been
976 sign- or zero-extended, we can either just use the object being
977 extended or make a narrower extension. If we want an even smaller
978 piece than the size of the object being extended, call ourselves
979 recursively.
980
981 This case is used mostly by combine and cse. */
982
983 if (GET_MODE (XEXP (x, 0)) == mode)
984 return XEXP (x, 0);
550d1387 985 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
23b2ce53 986 return gen_lowpart_common (mode, XEXP (x, 0));
550d1387 987 else if (msize < xsize)
3b80f6ca 988 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
23b2ce53 989 }
76321db6 990 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
550d1387
GK
991 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
992 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
993 return simplify_gen_subreg (mode, x, innermode, offset);
8aada4ad 994
23b2ce53
RS
995 /* Otherwise, we can't do this. */
996 return 0;
997}
998\f
b1d673be
RS
999/* Return the constant real or imaginary part (which has mode MODE)
1000 of a complex value X. The IMAGPART_P argument determines whether
1001 the real or complex component should be returned. This function
1002 returns NULL_RTX if the component isn't a constant. */
1003
1004static rtx
502b8322 1005gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
b1d673be
RS
1006{
1007 tree decl, part;
1008
1009 if (GET_CODE (x) == MEM
4c2da7f2 1010 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
b1d673be
RS
1011 {
1012 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1013 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1014 {
1015 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1016 if (TREE_CODE (part) == REAL_CST
1017 || TREE_CODE (part) == INTEGER_CST)
1018 return expand_expr (part, NULL_RTX, mode, 0);
1019 }
1020 }
1021 return NULL_RTX;
1022}
1023
280194b0
RS
1024/* Return the real part (which has mode MODE) of a complex value X.
1025 This always comes at the low address in memory. */
1026
1027rtx
502b8322 1028gen_realpart (enum machine_mode mode, rtx x)
280194b0 1029{
b1d673be
RS
1030 rtx part;
1031
1032 /* Handle complex constants. */
1033 part = gen_complex_constant_part (mode, x, 0);
1034 if (part != NULL_RTX)
1035 return part;
1036
e0e08ac2
JH
1037 if (WORDS_BIG_ENDIAN
1038 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1039 && REG_P (x)
1040 && REGNO (x) < FIRST_PSEUDO_REGISTER)
400500c4 1041 internal_error
c725bd79 1042 ("can't access real part of complex value in hard register");
dc139c90 1043 else if (WORDS_BIG_ENDIAN)
280194b0
RS
1044 return gen_highpart (mode, x);
1045 else
1046 return gen_lowpart (mode, x);
1047}
1048
1049/* Return the imaginary part (which has mode MODE) of a complex value X.
1050 This always comes at the high address in memory. */
1051
1052rtx
502b8322 1053gen_imagpart (enum machine_mode mode, rtx x)
280194b0 1054{
b1d673be
RS
1055 rtx part;
1056
1057 /* Handle complex constants. */
1058 part = gen_complex_constant_part (mode, x, 1);
1059 if (part != NULL_RTX)
1060 return part;
1061
e0e08ac2 1062 if (WORDS_BIG_ENDIAN)
280194b0 1063 return gen_lowpart (mode, x);
ddef6bc7 1064 else if (! WORDS_BIG_ENDIAN
40c0c3cf
JL
1065 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1066 && REG_P (x)
1067 && REGNO (x) < FIRST_PSEUDO_REGISTER)
400500c4
RK
1068 internal_error
1069 ("can't access imaginary part of complex value in hard register");
280194b0
RS
1070 else
1071 return gen_highpart (mode, x);
1072}
81284a6a
JW
1073
1074/* Return 1 iff X, assumed to be a SUBREG,
1075 refers to the real part of the complex value in its containing reg.
1076 Complex values are always stored with the real part in the first word,
1077 regardless of WORDS_BIG_ENDIAN. */
1078
1079int
502b8322 1080subreg_realpart_p (rtx x)
81284a6a
JW
1081{
1082 if (GET_CODE (x) != SUBREG)
1083 abort ();
1084
ddef6bc7 1085 return ((unsigned int) SUBREG_BYTE (x)
c5898ca8 1086 < (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
81284a6a 1087}
280194b0 1088\f
23b2ce53
RS
1089/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1090 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1091 least-significant part of X.
1092 MODE specifies how big a part of X to return;
1093 it usually should not be larger than a word.
1094 If X is a MEM whose address is a QUEUED, the value may be so also. */
1095
1096rtx
4de249d9 1097gen_lowpart_general (enum machine_mode mode, rtx x)
23b2ce53
RS
1098{
1099 rtx result = gen_lowpart_common (mode, x);
1100
1101 if (result)
1102 return result;
ea8262b0
RK
1103 else if (GET_CODE (x) == REG)
1104 {
1105 /* Must be a hard reg that's not valid in MODE. */
1106 result = gen_lowpart_common (mode, copy_to_reg (x));
1107 if (result == 0)
1108 abort ();
72c3833b 1109 return result;
ea8262b0 1110 }
23b2ce53
RS
1111 else if (GET_CODE (x) == MEM)
1112 {
1113 /* The only additional case we can do is MEM. */
b3694847 1114 int offset = 0;
37f5242b
RS
1115
1116 /* The following exposes the use of "x" to CSE. */
1117 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
9dd04ab5 1118 && SCALAR_INT_MODE_P (GET_MODE (x))
90db942b
RS
1119 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1120 GET_MODE_BITSIZE (GET_MODE (x)))
37f5242b
RS
1121 && ! no_new_pseudos)
1122 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1123
23b2ce53
RS
1124 if (WORDS_BIG_ENDIAN)
1125 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1126 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1127
1128 if (BYTES_BIG_ENDIAN)
1129 /* Adjust the address so that the address-after-the-data
1130 is unchanged. */
1131 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1132 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1133
f4ef873c 1134 return adjust_address (x, mode, offset);
23b2ce53 1135 }
e9a25f70
JL
1136 else if (GET_CODE (x) == ADDRESSOF)
1137 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
23b2ce53
RS
1138 else
1139 abort ();
1140}
1141
750c9258 1142/* Like `gen_lowpart', but refer to the most significant part.
ccba022b
RS
1143 This is used to access the imaginary part of a complex number. */
1144
1145rtx
502b8322 1146gen_highpart (enum machine_mode mode, rtx x)
ccba022b 1147{
ddef6bc7 1148 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1149 rtx result;
ddef6bc7 1150
ccba022b
RS
1151 /* This case loses if X is a subreg. To catch bugs early,
1152 complain if an invalid MODE is used even in other cases. */
ddef6bc7 1153 if (msize > UNITS_PER_WORD
c5898ca8 1154 && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
ccba022b 1155 abort ();
ddef6bc7 1156
e0e08ac2
JH
1157 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1158 subreg_highpart_offset (mode, GET_MODE (x)));
09482e0d
JW
1159
1160 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1161 the target if we have a MEM. gen_highpart must return a valid operand,
1162 emitting code if necessary to do so. */
13b8c631 1163 if (result != NULL_RTX && GET_CODE (result) == MEM)
09482e0d
JW
1164 result = validize_mem (result);
1165
e0e08ac2
JH
1166 if (!result)
1167 abort ();
1168 return result;
1169}
5222e470 1170
26d249eb 1171/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1172 be VOIDmode constant. */
1173rtx
502b8322 1174gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1175{
1176 if (GET_MODE (exp) != VOIDmode)
1177 {
1178 if (GET_MODE (exp) != innermode)
1179 abort ();
1180 return gen_highpart (outermode, exp);
1181 }
1182 return simplify_gen_subreg (outermode, exp, innermode,
1183 subreg_highpart_offset (outermode, innermode));
1184}
68252e27 1185
e0e08ac2
JH
1186/* Return offset in bytes to get OUTERMODE low part
1187 of the value in mode INNERMODE stored in memory in target format. */
8698cce3 1188
e0e08ac2 1189unsigned int
502b8322 1190subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1191{
1192 unsigned int offset = 0;
1193 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1194
e0e08ac2 1195 if (difference > 0)
ccba022b 1196 {
e0e08ac2
JH
1197 if (WORDS_BIG_ENDIAN)
1198 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1199 if (BYTES_BIG_ENDIAN)
1200 offset += difference % UNITS_PER_WORD;
ccba022b 1201 }
ddef6bc7 1202
e0e08ac2 1203 return offset;
ccba022b 1204}
eea50aa0 1205
e0e08ac2
JH
1206/* Return offset in bytes to get OUTERMODE high part
1207 of the value in mode INNERMODE stored in memory in target format. */
1208unsigned int
502b8322 1209subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1210{
1211 unsigned int offset = 0;
1212 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1213
e0e08ac2 1214 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
68252e27 1215 abort ();
e0e08ac2 1216
eea50aa0
JH
1217 if (difference > 0)
1218 {
e0e08ac2 1219 if (! WORDS_BIG_ENDIAN)
eea50aa0 1220 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1221 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1222 offset += difference % UNITS_PER_WORD;
1223 }
1224
e0e08ac2 1225 return offset;
eea50aa0 1226}
ccba022b 1227
23b2ce53
RS
1228/* Return 1 iff X, assumed to be a SUBREG,
1229 refers to the least significant part of its containing reg.
1230 If X is not a SUBREG, always return 1 (it is its own low part!). */
1231
1232int
502b8322 1233subreg_lowpart_p (rtx x)
23b2ce53
RS
1234{
1235 if (GET_CODE (x) != SUBREG)
1236 return 1;
a3a03040
RK
1237 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1238 return 0;
23b2ce53 1239
e0e08ac2
JH
1240 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1241 == SUBREG_BYTE (x));
23b2ce53
RS
1242}
1243\f
ddef6bc7
JJ
1244/* Return subword OFFSET of operand OP.
1245 The word number, OFFSET, is interpreted as the word number starting
1246 at the low-order address. OFFSET 0 is the low-order word if not
1247 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1248
1249 If we cannot extract the required word, we return zero. Otherwise,
1250 an rtx corresponding to the requested word will be returned.
1251
1252 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1253 reload has completed, a valid address will always be returned. After
1254 reload, if a valid address cannot be returned, we return zero.
1255
1256 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1257 it is the responsibility of the caller.
1258
1259 MODE is the mode of OP in case it is a CONST_INT.
1260
1261 ??? This is still rather broken for some cases. The problem for the
1262 moment is that all callers of this thing provide no 'goal mode' to
1263 tell us to work with. This exists because all callers were written
0631e0bf
JH
1264 in a word based SUBREG world.
1265 Now use of this function can be deprecated by simplify_subreg in most
1266 cases.
1267 */
ddef6bc7
JJ
1268
1269rtx
502b8322 1270operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
ddef6bc7
JJ
1271{
1272 if (mode == VOIDmode)
1273 mode = GET_MODE (op);
1274
1275 if (mode == VOIDmode)
1276 abort ();
1277
30f7a378 1278 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1279 if (mode != BLKmode
1280 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1281 return 0;
1282
30f7a378 1283 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1284 if (mode != BLKmode
1285 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1286 return const0_rtx;
1287
ddef6bc7
JJ
1288 /* Form a new MEM at the requested address. */
1289 if (GET_CODE (op) == MEM)
1290 {
f1ec5147 1291 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1292
f1ec5147
RK
1293 if (! validate_address)
1294 return new;
1295
1296 else if (reload_completed)
ddef6bc7 1297 {
f1ec5147
RK
1298 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1299 return 0;
ddef6bc7 1300 }
f1ec5147
RK
1301 else
1302 return replace_equiv_address (new, XEXP (new, 0));
ddef6bc7
JJ
1303 }
1304
0631e0bf
JH
1305 /* Rest can be handled by simplify_subreg. */
1306 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1307}
1308
23b2ce53
RS
1309/* Similar to `operand_subword', but never return 0. If we can't extract
1310 the required subword, put OP into a register and try again. If that fails,
750c9258 1311 abort. We always validate the address in this case.
23b2ce53
RS
1312
1313 MODE is the mode of OP, in case it is CONST_INT. */
1314
1315rtx
502b8322 1316operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1317{
ddef6bc7 1318 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1319
1320 if (result)
1321 return result;
1322
1323 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1324 {
1325 /* If this is a register which can not be accessed by words, copy it
1326 to a pseudo register. */
1327 if (GET_CODE (op) == REG)
1328 op = copy_to_reg (op);
1329 else
1330 op = force_reg (mode, op);
1331 }
23b2ce53 1332
ddef6bc7 1333 result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1334 if (result == 0)
1335 abort ();
1336
1337 return result;
1338}
1339\f
1340/* Given a compare instruction, swap the operands.
1341 A test instruction is changed into a compare of 0 against the operand. */
1342
1343void
502b8322 1344reverse_comparison (rtx insn)
23b2ce53
RS
1345{
1346 rtx body = PATTERN (insn);
1347 rtx comp;
1348
1349 if (GET_CODE (body) == SET)
1350 comp = SET_SRC (body);
1351 else
1352 comp = SET_SRC (XVECEXP (body, 0, 0));
1353
1354 if (GET_CODE (comp) == COMPARE)
1355 {
1356 rtx op0 = XEXP (comp, 0);
1357 rtx op1 = XEXP (comp, 1);
1358 XEXP (comp, 0) = op1;
1359 XEXP (comp, 1) = op0;
1360 }
1361 else
1362 {
c5c76735
JL
1363 rtx new = gen_rtx_COMPARE (VOIDmode,
1364 CONST0_RTX (GET_MODE (comp)), comp);
23b2ce53
RS
1365 if (GET_CODE (body) == SET)
1366 SET_SRC (body) = new;
1367 else
1368 SET_SRC (XVECEXP (body, 0, 0)) = new;
1369 }
1370}
1371\f
998d7deb
RH
1372/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1373 or (2) a component ref of something variable. Represent the later with
1374 a NULL expression. */
1375
1376static tree
502b8322 1377component_ref_for_mem_expr (tree ref)
998d7deb
RH
1378{
1379 tree inner = TREE_OPERAND (ref, 0);
1380
1381 if (TREE_CODE (inner) == COMPONENT_REF)
1382 inner = component_ref_for_mem_expr (inner);
c56e3582
RK
1383 else
1384 {
1385 tree placeholder_ptr = 0;
1386
1387 /* Now remove any conversions: they don't change what the underlying
1388 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1389 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1390 || TREE_CODE (inner) == NON_LVALUE_EXPR
1391 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1392 || TREE_CODE (inner) == SAVE_EXPR
1393 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
68252e27
KH
1394 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1395 inner = find_placeholder (inner, &placeholder_ptr);
1396 else
1397 inner = TREE_OPERAND (inner, 0);
c56e3582
RK
1398
1399 if (! DECL_P (inner))
1400 inner = NULL_TREE;
1401 }
998d7deb
RH
1402
1403 if (inner == TREE_OPERAND (ref, 0))
1404 return ref;
1405 else
c56e3582
RK
1406 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1407 TREE_OPERAND (ref, 1));
998d7deb 1408}
173b24b9
RK
1409
1410/* Given REF, a MEM, and T, either the type of X or the expression
1411 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1412 if we are making a new object of this type. BITPOS is nonzero if
1413 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1414
1415void
502b8322
AJ
1416set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1417 HOST_WIDE_INT bitpos)
173b24b9 1418{
8ac61af7 1419 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
998d7deb 1420 tree expr = MEM_EXPR (ref);
8ac61af7
RK
1421 rtx offset = MEM_OFFSET (ref);
1422 rtx size = MEM_SIZE (ref);
1423 unsigned int align = MEM_ALIGN (ref);
6f1087be 1424 HOST_WIDE_INT apply_bitpos = 0;
173b24b9
RK
1425 tree type;
1426
1427 /* It can happen that type_for_mode was given a mode for which there
1428 is no language-level type. In which case it returns NULL, which
1429 we can see here. */
1430 if (t == NULL_TREE)
1431 return;
1432
1433 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1434 if (type == error_mark_node)
1435 return;
173b24b9 1436
173b24b9
RK
1437 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1438 wrong answer, as it assumes that DECL_RTL already has the right alias
1439 info. Callers should not set DECL_RTL until after the call to
1440 set_mem_attributes. */
1441 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1442 abort ();
1443
738cc472 1444 /* Get the alias set from the expression or type (perhaps using a
8ac61af7
RK
1445 front-end routine) and use it. */
1446 alias = get_alias_set (t);
173b24b9
RK
1447
1448 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1449 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
03bf2c23 1450 RTX_UNCHANGING_P (ref)
1285011e
RK
1451 |= ((lang_hooks.honor_readonly
1452 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1453 || (! TYPE_P (t) && TREE_CONSTANT (t)));
173b24b9 1454
8ac61af7
RK
1455 /* If we are making an object of this type, or if this is a DECL, we know
1456 that it is a scalar if the type is not an aggregate. */
1457 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
173b24b9
RK
1458 MEM_SCALAR_P (ref) = 1;
1459
c3d32120
RK
1460 /* We can set the alignment from the type if we are making an object,
1461 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1462 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1463 align = MAX (align, TYPE_ALIGN (type));
40c0668b 1464
738cc472
RK
1465 /* If the size is known, we can set that. */
1466 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
8ac61af7 1467 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
738cc472 1468
80965c18
RK
1469 /* If T is not a type, we may be able to deduce some more information about
1470 the expression. */
1471 if (! TYPE_P (t))
8ac61af7
RK
1472 {
1473 maybe_set_unchanging (ref, t);
1474 if (TREE_THIS_VOLATILE (t))
1475 MEM_VOLATILE_P (ref) = 1;
173b24b9 1476
c56e3582
RK
1477 /* Now remove any conversions: they don't change what the underlying
1478 object is. Likewise for SAVE_EXPR. */
8ac61af7 1479 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
c56e3582
RK
1480 || TREE_CODE (t) == NON_LVALUE_EXPR
1481 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1482 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1483 t = TREE_OPERAND (t, 0);
1484
10b76d73
RK
1485 /* If this expression can't be addressed (e.g., it contains a reference
1486 to a non-addressable field), show we don't change its alias set. */
1487 if (! can_address_p (t))
1488 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1489
8ac61af7
RK
1490 /* If this is a decl, set the attributes of the MEM from it. */
1491 if (DECL_P (t))
1492 {
998d7deb
RH
1493 expr = t;
1494 offset = const0_rtx;
6f1087be 1495 apply_bitpos = bitpos;
8ac61af7
RK
1496 size = (DECL_SIZE_UNIT (t)
1497 && host_integerp (DECL_SIZE_UNIT (t), 1)
1498 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
68252e27 1499 align = DECL_ALIGN (t);
8ac61af7
RK
1500 }
1501
40c0668b 1502 /* If this is a constant, we know the alignment. */
9ddfb1a7
RK
1503 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1504 {
1505 align = TYPE_ALIGN (type);
1506#ifdef CONSTANT_ALIGNMENT
1507 align = CONSTANT_ALIGNMENT (t, align);
1508#endif
1509 }
998d7deb
RH
1510
1511 /* If this is a field reference and not a bit-field, record it. */
1512 /* ??? There is some information that can be gleened from bit-fields,
1513 such as the word offset in the structure that might be modified.
1514 But skip it for now. */
1515 else if (TREE_CODE (t) == COMPONENT_REF
1516 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1517 {
1518 expr = component_ref_for_mem_expr (t);
1519 offset = const0_rtx;
6f1087be 1520 apply_bitpos = bitpos;
998d7deb
RH
1521 /* ??? Any reason the field size would be different than
1522 the size we got from the type? */
1523 }
1524
1525 /* If this is an array reference, look for an outer field reference. */
1526 else if (TREE_CODE (t) == ARRAY_REF)
1527 {
1528 tree off_tree = size_zero_node;
1b1838b6
JW
1529 /* We can't modify t, because we use it at the end of the
1530 function. */
1531 tree t2 = t;
998d7deb
RH
1532
1533 do
1534 {
1b1838b6
JW
1535 tree index = TREE_OPERAND (t2, 1);
1536 tree array = TREE_OPERAND (t2, 0);
2567406a
JH
1537 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1538 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1539 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1540
1541 /* We assume all arrays have sizes that are a multiple of a byte.
1542 First subtract the lower bound, if any, in the type of the
1543 index, then convert to sizetype and multiply by the size of the
1544 array element. */
1545 if (low_bound != 0 && ! integer_zerop (low_bound))
1546 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1547 index, low_bound));
1548
1549 /* If the index has a self-referential type, pass it to a
1550 WITH_RECORD_EXPR; if the component size is, pass our
1551 component to one. */
7a6cdb44 1552 if (CONTAINS_PLACEHOLDER_P (index))
1b1838b6 1553 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
7a6cdb44 1554 if (CONTAINS_PLACEHOLDER_P (unit_size))
2567406a
JH
1555 unit_size = build (WITH_RECORD_EXPR, sizetype,
1556 unit_size, array);
1557
998d7deb
RH
1558 off_tree
1559 = fold (build (PLUS_EXPR, sizetype,
1560 fold (build (MULT_EXPR, sizetype,
2567406a
JH
1561 index,
1562 unit_size)),
998d7deb 1563 off_tree));
1b1838b6 1564 t2 = TREE_OPERAND (t2, 0);
998d7deb 1565 }
1b1838b6 1566 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1567
1b1838b6 1568 if (DECL_P (t2))
c67a1cf6 1569 {
1b1838b6 1570 expr = t2;
40cb04f1 1571 offset = NULL;
c67a1cf6 1572 if (host_integerp (off_tree, 1))
40cb04f1
RH
1573 {
1574 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1575 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1b1838b6 1576 align = DECL_ALIGN (t2);
fc555370 1577 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
40cb04f1
RH
1578 align = aoff;
1579 offset = GEN_INT (ioff);
6f1087be 1580 apply_bitpos = bitpos;
40cb04f1 1581 }
c67a1cf6 1582 }
1b1838b6 1583 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1584 {
1b1838b6 1585 expr = component_ref_for_mem_expr (t2);
998d7deb 1586 if (host_integerp (off_tree, 1))
6f1087be
RH
1587 {
1588 offset = GEN_INT (tree_low_cst (off_tree, 1));
1589 apply_bitpos = bitpos;
1590 }
998d7deb
RH
1591 /* ??? Any reason the field size would be different than
1592 the size we got from the type? */
1593 }
c67a1cf6 1594 else if (flag_argument_noalias > 1
1b1838b6
JW
1595 && TREE_CODE (t2) == INDIRECT_REF
1596 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
c67a1cf6 1597 {
1b1838b6 1598 expr = t2;
c67a1cf6
RH
1599 offset = NULL;
1600 }
1601 }
1602
1603 /* If this is a Fortran indirect argument reference, record the
1604 parameter decl. */
1605 else if (flag_argument_noalias > 1
1606 && TREE_CODE (t) == INDIRECT_REF
1607 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1608 {
1609 expr = t;
1610 offset = NULL;
998d7deb 1611 }
8ac61af7
RK
1612 }
1613
15c812e3 1614 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1615 bit position offset. Similarly, increase the size of the accessed
1616 object to contain the negative offset. */
6f1087be 1617 if (apply_bitpos)
8c317c5f
RH
1618 {
1619 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1620 if (size)
1621 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1622 }
6f1087be 1623
8ac61af7 1624 /* Now set the attributes we computed above. */
10b76d73 1625 MEM_ATTRS (ref)
998d7deb 1626 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
8ac61af7
RK
1627
1628 /* If this is already known to be a scalar or aggregate, we are done. */
1629 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
738cc472
RK
1630 return;
1631
8ac61af7
RK
1632 /* If it is a reference into an aggregate, this is part of an aggregate.
1633 Otherwise we don't know. */
173b24b9
RK
1634 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1635 || TREE_CODE (t) == ARRAY_RANGE_REF
1636 || TREE_CODE (t) == BIT_FIELD_REF)
1637 MEM_IN_STRUCT_P (ref) = 1;
1638}
1639
6f1087be 1640void
502b8322 1641set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1642{
1643 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1644}
1645
a560d4d4
JH
1646/* Set the decl for MEM to DECL. */
1647
1648void
502b8322 1649set_mem_attrs_from_reg (rtx mem, rtx reg)
a560d4d4
JH
1650{
1651 MEM_ATTRS (mem)
1652 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1653 GEN_INT (REG_OFFSET (reg)),
1654 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1655}
1656
173b24b9
RK
1657/* Set the alias set of MEM to SET. */
1658
1659void
502b8322 1660set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
173b24b9 1661{
68252e27 1662#ifdef ENABLE_CHECKING
173b24b9
RK
1663 /* If the new and old alias sets don't conflict, something is wrong. */
1664 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1665 abort ();
173b24b9
RK
1666#endif
1667
998d7deb 1668 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1669 MEM_SIZE (mem), MEM_ALIGN (mem),
1670 GET_MODE (mem));
173b24b9 1671}
738cc472 1672
d022d93e 1673/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1674
1675void
502b8322 1676set_mem_align (rtx mem, unsigned int align)
738cc472 1677{
998d7deb 1678 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1679 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1680 GET_MODE (mem));
738cc472 1681}
1285011e 1682
998d7deb 1683/* Set the expr for MEM to EXPR. */
1285011e
RK
1684
1685void
502b8322 1686set_mem_expr (rtx mem, tree expr)
1285011e
RK
1687{
1688 MEM_ATTRS (mem)
998d7deb 1689 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1690 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1691}
998d7deb
RH
1692
1693/* Set the offset of MEM to OFFSET. */
1694
1695void
502b8322 1696set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1697{
1698 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1699 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1700 GET_MODE (mem));
35aff10b
AM
1701}
1702
1703/* Set the size of MEM to SIZE. */
1704
1705void
502b8322 1706set_mem_size (rtx mem, rtx size)
35aff10b
AM
1707{
1708 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1709 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1710 GET_MODE (mem));
998d7deb 1711}
173b24b9 1712\f
738cc472
RK
1713/* Return a memory reference like MEMREF, but with its mode changed to MODE
1714 and its address changed to ADDR. (VOIDmode means don't change the mode.
1715 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1716 returned memory location is required to be valid. The memory
1717 attributes are not changed. */
23b2ce53 1718
738cc472 1719static rtx
502b8322 1720change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53
RS
1721{
1722 rtx new;
1723
1724 if (GET_CODE (memref) != MEM)
1725 abort ();
1726 if (mode == VOIDmode)
1727 mode = GET_MODE (memref);
1728 if (addr == 0)
1729 addr = XEXP (memref, 0);
a74ff877
JH
1730 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1731 && (!validate || memory_address_p (mode, addr)))
1732 return memref;
23b2ce53 1733
f1ec5147 1734 if (validate)
23b2ce53 1735 {
f1ec5147
RK
1736 if (reload_in_progress || reload_completed)
1737 {
1738 if (! memory_address_p (mode, addr))
1739 abort ();
1740 }
1741 else
1742 addr = memory_address (mode, addr);
23b2ce53 1743 }
750c9258 1744
9b04c6a8
RK
1745 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1746 return memref;
1747
3b80f6ca 1748 new = gen_rtx_MEM (mode, addr);
c6df88cb 1749 MEM_COPY_ATTRIBUTES (new, memref);
23b2ce53
RS
1750 return new;
1751}
792760b9 1752
738cc472
RK
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new);
  unsigned int align;

  /* For BLKmode the size is unknown (0) and only byte alignment can be
     assumed; otherwise both follow directly from the mode.  */
  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    {
      /* ... but only if its recorded attributes already match what we
	 would set below; otherwise make an unshared copy so we can
	 overwrite MEM_ATTRS without clobbering the original.  */
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new;

      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new, memref);
    }

  /* Drop expr/offset (we don't know how the memory changed); keep only
     the alias set plus the mode-derived size and alignment.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);

  return new;
}
792760b9 1785
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  (offset & -offset) isolates that lowest-order set bit.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways: from the new mode, or by
     shrinking the recorded size of the original BLKmode reference.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
1854
630036c6
JJ
1855/* Return a memory reference like MEMREF, but with its mode changed
1856 to MODE and its address changed to ADDR, which is assumed to be
1857 MEMREF offseted by OFFSET bytes. If VALIDATE is
1858 nonzero, the memory address is forced to be valid. */
1859
1860rtx
502b8322
AJ
1861adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1862 HOST_WIDE_INT offset, int validate)
630036c6
JJ
1863{
1864 memref = change_address_1 (memref, VOIDmode, addr, validate);
1865 return adjust_address_1 (memref, mode, offset, validate, 0);
1866}
1867
8ac61af7
RK
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Force the PIC-relative base into a register as a whole, so the
	 pic_offset_table_rtx pattern survives intact, then re-add OFFSET.  */
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     GET_MODE (new));
  return new;
}
68252e27 1909
792760b9
RK
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}
738cc472 1923
f1ec5147
RK
/* Likewise, but the reference is not required to be valid (VALIDATE == 0),
   and the temp-slot bookkeeping is deliberately not updated.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
e7dfe4bb
RH
1931
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  /* Walk outward through the MEM_EXPR until we find a tree node large
     enough to contain the widened access, zapping EXPR if none is.  */
  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Step out to the containing record, rebasing MEMOFFSET so it
	     is relative to that record instead of the field.  */
	  expr = TREE_OPERAND (expr, 0);
	  memoffset = (GEN_INT (INTVAL (memoffset)
			+ tree_low_cst (DECL_FIELD_OFFSET (field), 1)
			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				   MEM_ALIGN (new), mode);

  return new;
}
23b2ce53
RS
2012\f
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  /* label_num is a file-scope counter; post-incrementing it here is what
     guarantees the uniqueness of the label number.  */
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
2021\f
2022/* For procedure integration. */
2023
23b2ce53 2024/* Install new pointers to the first and last insns in the chain.
86fe05e0 2025 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2026 Used for an inline-procedure after copying the insn chain. */
2027
2028void
502b8322 2029set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2030{
86fe05e0
RK
2031 rtx insn;
2032
23b2ce53
RS
2033 first_insn = first;
2034 last_insn = last;
86fe05e0
RK
2035 cur_insn_uid = 0;
2036
2037 for (insn = first; insn; insn = NEXT_INSN (insn))
2038 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2039
2040 cur_insn_uid++;
23b2ce53
RS
2041}
2042
49ad7cfa
BS
/* Set the last label number found in the current function.
   This is used when belatedly compiling an inline function.  */

void
set_new_last_label_num (int last)
{
  /* Snapshot the current global label counter as the base; LAST records
     the highest label number that belongs to the function itself.  */
  base_label_num = label_num;
  last_label_num = last;
}
49ad7cfa 2052\f
23b2ce53
RS
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
{
  /* Only the label bookkeeping needs resetting here; the parameter is
     intentionally unused (hence ATTRIBUTE_UNUSED).  */
  last_label_num = 0;
}
2061\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

void
unshare_all_rtl (tree fndecl, rtx insn)
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
2089
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  /* copy_rtx_if_shared relies on the "used" mark bits being clear on
     entry, so clear them on everything reachable before unsharing.  */
  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl (cfun->decl, insn);
}
2119
2c07f13b
JH
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* A CLOBBER of a hard register is also allowed to be shared.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      error ("Invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("Shared rtx");
      debug_rtx (x);
      abort ();
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
2225
ba228239 2226/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2227 sharing in between the subexpressions. */
2228
2229void
2230verify_rtl_sharing (void)
2231{
2232 rtx p;
2233
2234 for (p = get_insns (); p; p = NEXT_INSN (p))
2235 if (INSN_P (p))
2236 {
2237 reset_used_flags (PATTERN (p));
2238 reset_used_flags (REG_NOTES (p));
2239 reset_used_flags (LOG_LINKS (p));
2240 }
2241
2242 for (p = get_insns (); p; p = NEXT_INSN (p))
2243 if (INSN_P (p))
2244 {
2245 verify_rtx_sharing (PATTERN (p), p);
2246 verify_rtx_sharing (REG_NOTES (p), p);
2247 verify_rtx_sharing (LOG_LINKS (p), p);
2248 }
2249}
2250
d1b81779
GK
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
      }
}
2265
5c6df058
AO
2266/* Go through all virtual stack slots of a function and copy any
2267 shared structure. */
2268static void
502b8322 2269unshare_all_decls (tree blk)
5c6df058
AO
2270{
2271 tree t;
2272
2273 /* Copy shared decls. */
2274 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2275 if (DECL_RTL_SET_P (t))
2276 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
5c6df058
AO
2277
2278 /* Now process sub-blocks. */
2279 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2280 unshare_all_decls (t);
2281}
2282
2d4aecb3 2283/* Go through all virtual stack slots of a function and mark them as
30f7a378 2284 not shared. */
2d4aecb3 2285static void
502b8322 2286reset_used_decls (tree blk)
2d4aecb3
AO
2287{
2288 tree t;
2289
2290 /* Mark decls. */
2291 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2292 if (DECL_RTL_SET_P (t))
2293 reset_used_flags (DECL_RTL (t));
2d4aecb3
AO
2294
2295 /* Now process sub-blocks. */
2296 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2297 reset_used_decls (t);
2298}
2299
/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
   placed in the result directly, rather than being copied.  MAY_SHARE is
   either a MEM of an EXPR_LIST of MEMs.  */

rtx
copy_most_rtx (rtx orig, rtx may_share)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  /* ORIG itself may be the shared expression (directly, or as a member
     of the MAY_SHARE EXPR_LIST); then it is returned uncopied.  */
  if (orig == may_share
      || (GET_CODE (may_share) == EXPR_LIST
	  && in_expr_list_p (may_share, orig)))
    return orig;

  code = GET_CODE (orig);

  /* These rtx codes are always freely sharable, so never copied.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  /* Transfer every rtx flag bit individually.  */
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
  RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
  RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);

  /* Copy each operand according to the rtx code's format string.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Subexpression: recurse unless it is exactly MAY_SHARE.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
	  break;

	case 'u':
	  /* Insn reference: shared, never copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      /* Allocate a fresh vector and recurse on each element.  */
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'n':
	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
2399
23b2ce53 2400/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2401 Recursively does the same for subexpressions. Uses
2402 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2403
2404rtx
502b8322 2405copy_rtx_if_shared (rtx orig)
23b2ce53 2406{
32b32b16
AP
2407 copy_rtx_if_shared_1 (&orig);
2408 return orig;
2409}
2410
ff954f39
AP
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  /* Recursion into the last subexpression is deferred and turned into
     iteration via the `repeat' label, to bound stack depth on long
     right-leaning chains.  */
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* A CLOBBER of a hard register may also stay shared.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return;
      break;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      rtx copy;

      copy = rtx_alloc (code);
      memcpy (copy, x, RTX_SIZE (code));
      x = copy;
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Process the previously deferred operand now; defer this one.  */
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  /* Handle the final deferred operand iteratively instead of recursing.  */
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
2540
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* For the last operand, iterate instead of recursing, to bound
	     stack depth on long expression chains.  */
	  if (i == length-1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  reset_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}
2c07f13b
JH
2613
2614/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2615 to look for shared sub-parts. */
2616
2617void
2618set_used_flags (rtx x)
2619{
2620 int i, j;
2621 enum rtx_code code;
2622 const char *format_ptr;
2623
2624 if (x == 0)
2625 return;
2626
2627 code = GET_CODE (x);
2628
2629 /* These types may be freely shared so we needn't do any resetting
2630 for them. */
2631
2632 switch (code)
2633 {
2634 case REG:
2635 case QUEUED:
2636 case CONST_INT:
2637 case CONST_DOUBLE:
2638 case CONST_VECTOR:
2639 case SYMBOL_REF:
2640 case CODE_LABEL:
2641 case PC:
2642 case CC0:
2643 return;
2644
2645 case INSN:
2646 case JUMP_INSN:
2647 case CALL_INSN:
2648 case NOTE:
2649 case LABEL_REF:
2650 case BARRIER:
2651 /* The chain of insns is not being copied. */
2652 return;
2653
2654 default:
2655 break;
2656 }
2657
2658 RTX_FLAG (x, used) = 1;
2659
2660 format_ptr = GET_RTX_FORMAT (code);
2661 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2662 {
2663 switch (*format_ptr++)
2664 {
2665 case 'e':
2666 set_used_flags (XEXP (x, i));
2667 break;
2668
2669 case 'E':
2670 for (j = 0; j < XVECLEN (x, i); j++)
2671 set_used_flags (XVECEXP (x, i, j));
2672 break;
2673 }
2674 }
2675}
23b2ce53
RS
2676\f
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  /* Strip wrappers off OTHER to find the register or memory actually
     being written.  */
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  /* X must be copied if OTHER is a MEM and X is anything that could
     depend on memory, or if OTHER is a hard register or a register
     that X mentions.  */
  if ((GET_CODE (other) == MEM
       && ! CONSTANT_P (x)
       && GET_CODE (x) != REG
       && GET_CODE (x) != SUBREG)
      || (GET_CODE (other) == REG
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
2713\f
2714/* Emission of insns (adding them to the doubly-linked list). */
2715
/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  /* first_insn is the file-scope head of the current insn chain.  */
  return first_insn;
}
2723
3dec4024
JH
/* Specify a new insn as the first in the chain.  */

void
set_first_insn (rtx insn)
{
  /* A chain head must have no predecessor.  */
  if (PREV_INSN (insn) != 0)
    abort ();
  first_insn = insn;
}
2733
23b2ce53
RS
/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  /* last_insn is the file-scope tail of the current insn chain.  */
  return last_insn;
}
2741
/* Specify a new insn as the last in the chain.  */

void
set_last_insn (rtx insn)
{
  /* A chain tail must have no successor.  */
  if (NEXT_INSN (insn) != 0)
    abort ();
  last_insn = insn;
}
2751
2752/* Return the last insn emitted, even if it is in a sequence now pushed. */
2753
2754rtx
502b8322 2755get_last_insn_anywhere (void)
23b2ce53
RS
2756{
2757 struct sequence_stack *stack;
2758 if (last_insn)
2759 return last_insn;
49ad7cfa 2760 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2761 if (stack->last != 0)
2762 return stack->last;
2763 return 0;
2764}
2765
2a496e8b
JDA
2766/* Return the first nonnote insn emitted in current sequence or current
2767 function. This routine looks inside SEQUENCEs. */
2768
2769rtx
502b8322 2770get_first_nonnote_insn (void)
2a496e8b
JDA
2771{
2772 rtx insn = first_insn;
2773
2774 while (insn)
2775 {
2776 insn = next_insn (insn);
2777 if (insn == 0 || GET_CODE (insn) != NOTE)
2778 break;
2779 }
2780
2781 return insn;
2782}
2783
2784/* Return the last nonnote insn emitted in current sequence or current
2785 function. This routine looks inside SEQUENCEs. */
2786
2787rtx
502b8322 2788get_last_nonnote_insn (void)
2a496e8b
JDA
2789{
2790 rtx insn = last_insn;
2791
2792 while (insn)
2793 {
2794 insn = previous_insn (insn);
2795 if (insn == 0 || GET_CODE (insn) != NOTE)
2796 break;
2797 }
2798
2799 return insn;
2800}
2801
23b2ce53
RS
/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  /* cur_insn_uid is kept one above the highest UID handed out.  */
  return cur_insn_uid;
}
aeeeda03 2809
673b5311
MM
/* Renumber instructions so that no instruction UIDs are wasted.
   If STREAM is nonnull, log each renumbering to it.  */

void
renumber_insns (FILE *stream)
{
  rtx insn;

  /* If we're not supposed to renumber instructions, don't.  */
  if (!flag_renumber_insns)
    return;

  /* If there aren't that many instructions, then it's not really
     worth renumbering them.  (flag_renumber_insns > 1 forces it.)  */
  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
    return;

  cur_insn_uid = 1;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (stream)
	fprintf (stream, "Renumbering insn %d to %d\n",
		 INSN_UID (insn), cur_insn_uid);
      INSN_UID (insn) = cur_insn_uid++;
    }
}
23b2ce53
RS
2836\f
2837/* Return the next insn. If it is a SEQUENCE, return the first insn
2838 of the sequence. */
2839
2840rtx
502b8322 2841next_insn (rtx insn)
23b2ce53
RS
2842{
2843 if (insn)
2844 {
2845 insn = NEXT_INSN (insn);
2846 if (insn && GET_CODE (insn) == INSN
2847 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2848 insn = XVECEXP (PATTERN (insn), 0, 0);
2849 }
2850
2851 return insn;
2852}
2853
2854/* Return the previous insn. If it is a SEQUENCE, return the last insn
2855 of the sequence. */
2856
2857rtx
502b8322 2858previous_insn (rtx insn)
23b2ce53
RS
2859{
2860 if (insn)
2861 {
2862 insn = PREV_INSN (insn);
2863 if (insn && GET_CODE (insn) == INSN
2864 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2865 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2866 }
2867
2868 return insn;
2869}
2870
2871/* Return the next insn after INSN that is not a NOTE. This routine does not
2872 look inside SEQUENCEs. */
2873
2874rtx
502b8322 2875next_nonnote_insn (rtx insn)
23b2ce53
RS
2876{
2877 while (insn)
2878 {
2879 insn = NEXT_INSN (insn);
2880 if (insn == 0 || GET_CODE (insn) != NOTE)
2881 break;
2882 }
2883
2884 return insn;
2885}
2886
2887/* Return the previous insn before INSN that is not a NOTE. This routine does
2888 not look inside SEQUENCEs. */
2889
2890rtx
502b8322 2891prev_nonnote_insn (rtx insn)
23b2ce53
RS
2892{
2893 while (insn)
2894 {
2895 insn = PREV_INSN (insn);
2896 if (insn == 0 || GET_CODE (insn) != NOTE)
2897 break;
2898 }
2899
2900 return insn;
2901}
2902
2903/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2904 or 0, if there is none. This routine does not look inside
0f41302f 2905 SEQUENCEs. */
23b2ce53
RS
2906
2907rtx
502b8322 2908next_real_insn (rtx insn)
23b2ce53
RS
2909{
2910 while (insn)
2911 {
2912 insn = NEXT_INSN (insn);
2913 if (insn == 0 || GET_CODE (insn) == INSN
2914 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2915 break;
2916 }
2917
2918 return insn;
2919}
2920
2921/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2922 or 0, if there is none. This routine does not look inside
2923 SEQUENCEs. */
2924
2925rtx
502b8322 2926prev_real_insn (rtx insn)
23b2ce53
RS
2927{
2928 while (insn)
2929 {
2930 insn = PREV_INSN (insn);
2931 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2932 || GET_CODE (insn) == JUMP_INSN)
2933 break;
2934 }
2935
2936 return insn;
2937}
2938
ee960939
OH
2939/* Return the last CALL_INSN in the current list, or 0 if there is none.
2940 This routine does not look inside SEQUENCEs. */
2941
2942rtx
502b8322 2943last_call_insn (void)
ee960939
OH
2944{
2945 rtx insn;
2946
2947 for (insn = get_last_insn ();
2948 insn && GET_CODE (insn) != CALL_INSN;
2949 insn = PREV_INSN (insn))
2950 ;
2951
2952 return insn;
2953}
2954
23b2ce53
RS
2955/* Find the next insn after INSN that really does something. This routine
2956 does not look inside SEQUENCEs. Until reload has completed, this is the
2957 same as next_real_insn. */
2958
69732dcb 2959int
502b8322 2960active_insn_p (rtx insn)
69732dcb 2961{
23b8ba81
RH
2962 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2963 || (GET_CODE (insn) == INSN
2964 && (! reload_completed
2965 || (GET_CODE (PATTERN (insn)) != USE
2966 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
2967}
2968
23b2ce53 2969rtx
502b8322 2970next_active_insn (rtx insn)
23b2ce53
RS
2971{
2972 while (insn)
2973 {
2974 insn = NEXT_INSN (insn);
69732dcb 2975 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2976 break;
2977 }
2978
2979 return insn;
2980}
2981
2982/* Find the last insn before INSN that really does something. This routine
2983 does not look inside SEQUENCEs. Until reload has completed, this is the
2984 same as prev_real_insn. */
2985
2986rtx
502b8322 2987prev_active_insn (rtx insn)
23b2ce53
RS
2988{
2989 while (insn)
2990 {
2991 insn = PREV_INSN (insn);
69732dcb 2992 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2993 break;
2994 }
2995
2996 return insn;
2997}
2998
2999/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3000
3001rtx
502b8322 3002next_label (rtx insn)
23b2ce53
RS
3003{
3004 while (insn)
3005 {
3006 insn = NEXT_INSN (insn);
3007 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3008 break;
3009 }
3010
3011 return insn;
3012}
3013
3014/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3015
3016rtx
502b8322 3017prev_label (rtx insn)
23b2ce53
RS
3018{
3019 while (insn)
3020 {
3021 insn = PREV_INSN (insn);
3022 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3023 break;
3024 }
3025
3026 return insn;
3027}
3028\f
3029#ifdef HAVE_cc0
c572e5ba
JVA
3030/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3031 and REG_CC_USER notes so we can find it. */
3032
3033void
502b8322 3034link_cc0_insns (rtx insn)
c572e5ba
JVA
3035{
3036 rtx user = next_nonnote_insn (insn);
3037
3038 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3039 user = XVECEXP (PATTERN (user), 0, 0);
3040
c5c76735
JL
3041 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3042 REG_NOTES (user));
3b80f6ca 3043 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
c572e5ba
JVA
3044}
3045
23b2ce53
RS
3046/* Return the next insn that uses CC0 after INSN, which is assumed to
3047 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3048 applied to the result of this function should yield INSN).
3049
3050 Normally, this is simply the next insn. However, if a REG_CC_USER note
3051 is present, it contains the insn that uses CC0.
3052
3053 Return 0 if we can't find the insn. */
3054
3055rtx
502b8322 3056next_cc0_user (rtx insn)
23b2ce53 3057{
906c4e36 3058 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3059
3060 if (note)
3061 return XEXP (note, 0);
3062
3063 insn = next_nonnote_insn (insn);
3064 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3065 insn = XVECEXP (PATTERN (insn), 0, 0);
3066
2c3c49de 3067 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3068 return insn;
3069
3070 return 0;
3071}
3072
3073/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3074 note, it is the previous insn. */
3075
3076rtx
502b8322 3077prev_cc0_setter (rtx insn)
23b2ce53 3078{
906c4e36 3079 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3080
3081 if (note)
3082 return XEXP (note, 0);
3083
3084 insn = prev_nonnote_insn (insn);
3085 if (! sets_cc0_p (PATTERN (insn)))
3086 abort ();
3087
3088 return insn;
3089}
3090#endif
e5bef2e4
HB
3091
3092/* Increment the label uses for all labels present in rtx. */
3093
3094static void
502b8322 3095mark_label_nuses (rtx x)
e5bef2e4 3096{
b3694847
SS
3097 enum rtx_code code;
3098 int i, j;
3099 const char *fmt;
e5bef2e4
HB
3100
3101 code = GET_CODE (x);
3102 if (code == LABEL_REF)
3103 LABEL_NUSES (XEXP (x, 0))++;
3104
3105 fmt = GET_RTX_FORMAT (code);
3106 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3107 {
3108 if (fmt[i] == 'e')
0fb7aeda 3109 mark_label_nuses (XEXP (x, i));
e5bef2e4 3110 else if (fmt[i] == 'E')
0fb7aeda 3111 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3112 mark_label_nuses (XVECEXP (x, i, j));
3113 }
3114}
3115
23b2ce53
RS
3116\f
3117/* Try splitting insns that can be split for better scheduling.
3118 PAT is the pattern which might split.
3119 TRIAL is the insn providing PAT.
cc2902df 3120 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3121
3122 If this routine succeeds in splitting, it returns the first or last
11147ebe 3123 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3124 returns TRIAL. If the insn to be returned can be split, it will be. */
3125
3126rtx
502b8322 3127try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3128{
3129 rtx before = PREV_INSN (trial);
3130 rtx after = NEXT_INSN (trial);
23b2ce53
RS
3131 int has_barrier = 0;
3132 rtx tem;
6b24c259
JH
3133 rtx note, seq;
3134 int probability;
599aedd9
RH
3135 rtx insn_last, insn;
3136 int njumps = 0;
6b24c259
JH
3137
3138 if (any_condjump_p (trial)
3139 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3140 split_branch_probability = INTVAL (XEXP (note, 0));
3141 probability = split_branch_probability;
3142
3143 seq = split_insns (pat, trial);
3144
3145 split_branch_probability = -1;
23b2ce53
RS
3146
3147 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3148 We may need to handle this specially. */
3149 if (after && GET_CODE (after) == BARRIER)
3150 {
3151 has_barrier = 1;
3152 after = NEXT_INSN (after);
3153 }
3154
599aedd9
RH
3155 if (!seq)
3156 return trial;
3157
3158 /* Avoid infinite loop if any insn of the result matches
3159 the original pattern. */
3160 insn_last = seq;
3161 while (1)
23b2ce53 3162 {
599aedd9
RH
3163 if (INSN_P (insn_last)
3164 && rtx_equal_p (PATTERN (insn_last), pat))
3165 return trial;
3166 if (!NEXT_INSN (insn_last))
3167 break;
3168 insn_last = NEXT_INSN (insn_last);
3169 }
750c9258 3170
599aedd9
RH
3171 /* Mark labels. */
3172 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3173 {
3174 if (GET_CODE (insn) == JUMP_INSN)
3175 {
3176 mark_jump_label (PATTERN (insn), insn, 0);
3177 njumps++;
3178 if (probability != -1
3179 && any_condjump_p (insn)
3180 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3181 {
599aedd9
RH
3182 /* We can preserve the REG_BR_PROB notes only if exactly
3183 one jump is created, otherwise the machine description
3184 is responsible for this step using
3185 split_branch_probability variable. */
3186 if (njumps != 1)
3187 abort ();
3188 REG_NOTES (insn)
3189 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3190 GEN_INT (probability),
3191 REG_NOTES (insn));
2f937369 3192 }
599aedd9
RH
3193 }
3194 }
3195
3196 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3197 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3198 if (GET_CODE (trial) == CALL_INSN)
3199 {
3200 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3201 if (GET_CODE (insn) == CALL_INSN)
3202 {
f6a1f3f6
RH
3203 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3204 while (*p)
3205 p = &XEXP (*p, 1);
3206 *p = CALL_INSN_FUNCTION_USAGE (trial);
599aedd9
RH
3207 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3208 }
3209 }
4b5e8abe 3210
599aedd9
RH
3211 /* Copy notes, particularly those related to the CFG. */
3212 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3213 {
3214 switch (REG_NOTE_KIND (note))
3215 {
3216 case REG_EH_REGION:
2f937369
DM
3217 insn = insn_last;
3218 while (insn != NULL_RTX)
3219 {
599aedd9
RH
3220 if (GET_CODE (insn) == CALL_INSN
3221 || (flag_non_call_exceptions
3222 && may_trap_p (PATTERN (insn))))
3223 REG_NOTES (insn)
3224 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3225 XEXP (note, 0),
3226 REG_NOTES (insn));
2f937369
DM
3227 insn = PREV_INSN (insn);
3228 }
599aedd9 3229 break;
216183ce 3230
599aedd9
RH
3231 case REG_NORETURN:
3232 case REG_SETJMP:
3233 case REG_ALWAYS_RETURN:
3234 insn = insn_last;
3235 while (insn != NULL_RTX)
216183ce 3236 {
599aedd9
RH
3237 if (GET_CODE (insn) == CALL_INSN)
3238 REG_NOTES (insn)
3239 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3240 XEXP (note, 0),
3241 REG_NOTES (insn));
3242 insn = PREV_INSN (insn);
216183ce 3243 }
599aedd9 3244 break;
d6e95df8 3245
599aedd9
RH
3246 case REG_NON_LOCAL_GOTO:
3247 insn = insn_last;
3248 while (insn != NULL_RTX)
2f937369 3249 {
599aedd9
RH
3250 if (GET_CODE (insn) == JUMP_INSN)
3251 REG_NOTES (insn)
3252 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3253 XEXP (note, 0),
3254 REG_NOTES (insn));
3255 insn = PREV_INSN (insn);
2f937369 3256 }
599aedd9 3257 break;
e5bef2e4 3258
599aedd9
RH
3259 default:
3260 break;
23b2ce53 3261 }
599aedd9
RH
3262 }
3263
3264 /* If there are LABELS inside the split insns increment the
3265 usage count so we don't delete the label. */
3266 if (GET_CODE (trial) == INSN)
3267 {
3268 insn = insn_last;
3269 while (insn != NULL_RTX)
23b2ce53 3270 {
599aedd9
RH
3271 if (GET_CODE (insn) == INSN)
3272 mark_label_nuses (PATTERN (insn));
23b2ce53 3273
599aedd9
RH
3274 insn = PREV_INSN (insn);
3275 }
23b2ce53
RS
3276 }
3277
0435312e 3278 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3279
3280 delete_insn (trial);
3281 if (has_barrier)
3282 emit_barrier_after (tem);
3283
3284 /* Recursively call try_split for each new insn created; by the
3285 time control returns here that insn will be fully split, so
3286 set LAST and continue from the insn after the one returned.
3287 We can't use next_active_insn here since AFTER may be a note.
3288 Ignore deleted insns, which can be occur if not optimizing. */
3289 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3290 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3291 tem = try_split (PATTERN (tem), tem, 1);
3292
3293 /* Return either the first or the last insn, depending on which was
3294 requested. */
3295 return last
3296 ? (after ? PREV_INSN (after) : last_insn)
3297 : NEXT_INSN (before);
23b2ce53
RS
3298}
3299\f
3300/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3301 Store PATTERN in the pattern slots. */
23b2ce53
RS
3302
3303rtx
502b8322 3304make_insn_raw (rtx pattern)
23b2ce53 3305{
b3694847 3306 rtx insn;
23b2ce53 3307
1f8f4a0b 3308 insn = rtx_alloc (INSN);
23b2ce53 3309
43127294 3310 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3311 PATTERN (insn) = pattern;
3312 INSN_CODE (insn) = -1;
1632afca
RS
3313 LOG_LINKS (insn) = NULL;
3314 REG_NOTES (insn) = NULL;
0435312e 3315 INSN_LOCATOR (insn) = 0;
ba4f7968 3316 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3317
47984720
NC
3318#ifdef ENABLE_RTL_CHECKING
3319 if (insn
2c3c49de 3320 && INSN_P (insn)
47984720
NC
3321 && (returnjump_p (insn)
3322 || (GET_CODE (insn) == SET
3323 && SET_DEST (insn) == pc_rtx)))
3324 {
3325 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3326 debug_rtx (insn);
3327 }
3328#endif
750c9258 3329
23b2ce53
RS
3330 return insn;
3331}
3332
2f937369 3333/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53
RS
3334
3335static rtx
502b8322 3336make_jump_insn_raw (rtx pattern)
23b2ce53 3337{
b3694847 3338 rtx insn;
23b2ce53 3339
4b1f5e8c 3340 insn = rtx_alloc (JUMP_INSN);
1632afca 3341 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3342
3343 PATTERN (insn) = pattern;
3344 INSN_CODE (insn) = -1;
1632afca
RS
3345 LOG_LINKS (insn) = NULL;
3346 REG_NOTES (insn) = NULL;
3347 JUMP_LABEL (insn) = NULL;
0435312e 3348 INSN_LOCATOR (insn) = 0;
ba4f7968 3349 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3350
3351 return insn;
3352}
aff507f4 3353
2f937369 3354/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3355
3356static rtx
502b8322 3357make_call_insn_raw (rtx pattern)
aff507f4 3358{
b3694847 3359 rtx insn;
aff507f4
RK
3360
3361 insn = rtx_alloc (CALL_INSN);
3362 INSN_UID (insn) = cur_insn_uid++;
3363
3364 PATTERN (insn) = pattern;
3365 INSN_CODE (insn) = -1;
3366 LOG_LINKS (insn) = NULL;
3367 REG_NOTES (insn) = NULL;
3368 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
0435312e 3369 INSN_LOCATOR (insn) = 0;
ba4f7968 3370 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3371
3372 return insn;
3373}
23b2ce53
RS
3374\f
3375/* Add INSN to the end of the doubly-linked list.
3376 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3377
3378void
502b8322 3379add_insn (rtx insn)
23b2ce53
RS
3380{
3381 PREV_INSN (insn) = last_insn;
3382 NEXT_INSN (insn) = 0;
3383
3384 if (NULL != last_insn)
3385 NEXT_INSN (last_insn) = insn;
3386
3387 if (NULL == first_insn)
3388 first_insn = insn;
3389
3390 last_insn = insn;
3391}
3392
a0ae8e8d
RK
3393/* Add INSN into the doubly-linked list after insn AFTER. This and
3394 the next should be the only functions called to insert an insn once
ba213285 3395 delay slots have been filled since only they know how to update a
a0ae8e8d 3396 SEQUENCE. */
23b2ce53
RS
3397
3398void
502b8322 3399add_insn_after (rtx insn, rtx after)
23b2ce53
RS
3400{
3401 rtx next = NEXT_INSN (after);
3c030e88 3402 basic_block bb;
23b2ce53 3403
6782074d 3404 if (optimize && INSN_DELETED_P (after))
ba213285
RK
3405 abort ();
3406
23b2ce53
RS
3407 NEXT_INSN (insn) = next;
3408 PREV_INSN (insn) = after;
3409
3410 if (next)
3411 {
3412 PREV_INSN (next) = insn;
3413 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3414 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3415 }
3416 else if (last_insn == after)
3417 last_insn = insn;
3418 else
3419 {
49ad7cfa 3420 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3421 /* Scan all pending sequences too. */
3422 for (; stack; stack = stack->next)
3423 if (after == stack->last)
fef0509b
RK
3424 {
3425 stack->last = insn;
3426 break;
3427 }
a0ae8e8d
RK
3428
3429 if (stack == 0)
3430 abort ();
23b2ce53
RS
3431 }
3432
ba4f7968
JH
3433 if (GET_CODE (after) != BARRIER
3434 && GET_CODE (insn) != BARRIER
3c030e88
JH
3435 && (bb = BLOCK_FOR_INSN (after)))
3436 {
3437 set_block_for_insn (insn, bb);
38c1593d 3438 if (INSN_P (insn))
68252e27 3439 bb->flags |= BB_DIRTY;
3c030e88 3440 /* Should not happen as first in the BB is always
a1f300c0 3441 either NOTE or LABEL. */
a813c111 3442 if (BB_END (bb) == after
3c030e88
JH
3443 /* Avoid clobbering of structure when creating new BB. */
3444 && GET_CODE (insn) != BARRIER
3445 && (GET_CODE (insn) != NOTE
3446 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
a813c111 3447 BB_END (bb) = insn;
3c030e88
JH
3448 }
3449
23b2ce53
RS
3450 NEXT_INSN (after) = insn;
3451 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3452 {
3453 rtx sequence = PATTERN (after);
3454 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3455 }
3456}
3457
a0ae8e8d
RK
3458/* Add INSN into the doubly-linked list before insn BEFORE. This and
3459 the previous should be the only functions called to insert an insn once
ba213285 3460 delay slots have been filled since only they know how to update a
a0ae8e8d
RK
3461 SEQUENCE. */
3462
3463void
502b8322 3464add_insn_before (rtx insn, rtx before)
a0ae8e8d
RK
3465{
3466 rtx prev = PREV_INSN (before);
3c030e88 3467 basic_block bb;
a0ae8e8d 3468
6782074d 3469 if (optimize && INSN_DELETED_P (before))
ba213285
RK
3470 abort ();
3471
a0ae8e8d
RK
3472 PREV_INSN (insn) = prev;
3473 NEXT_INSN (insn) = before;
3474
3475 if (prev)
3476 {
3477 NEXT_INSN (prev) = insn;
3478 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3479 {
3480 rtx sequence = PATTERN (prev);
3481 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3482 }
3483 }
3484 else if (first_insn == before)
3485 first_insn = insn;
3486 else
3487 {
49ad7cfa 3488 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3489 /* Scan all pending sequences too. */
3490 for (; stack; stack = stack->next)
3491 if (before == stack->first)
fef0509b
RK
3492 {
3493 stack->first = insn;
3494 break;
3495 }
a0ae8e8d
RK
3496
3497 if (stack == 0)
3498 abort ();
3499 }
3500
ba4f7968
JH
3501 if (GET_CODE (before) != BARRIER
3502 && GET_CODE (insn) != BARRIER
3c030e88
JH
3503 && (bb = BLOCK_FOR_INSN (before)))
3504 {
3505 set_block_for_insn (insn, bb);
38c1593d 3506 if (INSN_P (insn))
68252e27 3507 bb->flags |= BB_DIRTY;
3c030e88 3508 /* Should not happen as first in the BB is always
a1f300c0 3509 either NOTE or LABEl. */
a813c111 3510 if (BB_HEAD (bb) == insn
3c030e88
JH
3511 /* Avoid clobbering of structure when creating new BB. */
3512 && GET_CODE (insn) != BARRIER
3513 && (GET_CODE (insn) != NOTE
3514 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3515 abort ();
3516 }
3517
a0ae8e8d
RK
3518 PREV_INSN (before) = insn;
3519 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3520 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3521}
3522
89e99eea
DB
3523/* Remove an insn from its doubly-linked list. This function knows how
3524 to handle sequences. */
3525void
502b8322 3526remove_insn (rtx insn)
89e99eea
DB
3527{
3528 rtx next = NEXT_INSN (insn);
3529 rtx prev = PREV_INSN (insn);
53c17031
JH
3530 basic_block bb;
3531
89e99eea
DB
3532 if (prev)
3533 {
3534 NEXT_INSN (prev) = next;
3535 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3536 {
3537 rtx sequence = PATTERN (prev);
3538 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3539 }
3540 }
3541 else if (first_insn == insn)
3542 first_insn = next;
3543 else
3544 {
49ad7cfa 3545 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3546 /* Scan all pending sequences too. */
3547 for (; stack; stack = stack->next)
3548 if (insn == stack->first)
3549 {
3550 stack->first = next;
3551 break;
3552 }
3553
3554 if (stack == 0)
3555 abort ();
3556 }
3557
3558 if (next)
3559 {
3560 PREV_INSN (next) = prev;
3561 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3562 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3563 }
3564 else if (last_insn == insn)
3565 last_insn = prev;
3566 else
3567 {
49ad7cfa 3568 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3569 /* Scan all pending sequences too. */
3570 for (; stack; stack = stack->next)
3571 if (insn == stack->last)
3572 {
3573 stack->last = prev;
3574 break;
3575 }
3576
3577 if (stack == 0)
3578 abort ();
3579 }
ba4f7968 3580 if (GET_CODE (insn) != BARRIER
53c17031
JH
3581 && (bb = BLOCK_FOR_INSN (insn)))
3582 {
38c1593d 3583 if (INSN_P (insn))
68252e27 3584 bb->flags |= BB_DIRTY;
a813c111 3585 if (BB_HEAD (bb) == insn)
53c17031 3586 {
3bf1e984
RK
3587 /* Never ever delete the basic block note without deleting whole
3588 basic block. */
53c17031
JH
3589 if (GET_CODE (insn) == NOTE)
3590 abort ();
a813c111 3591 BB_HEAD (bb) = next;
53c17031 3592 }
a813c111
SB
3593 if (BB_END (bb) == insn)
3594 BB_END (bb) = prev;
53c17031 3595 }
89e99eea
DB
3596}
3597
ee960939
OH
3598/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3599
3600void
502b8322 3601add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939
OH
3602{
3603 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3604 abort ();
3605
3606 /* Put the register usage information on the CALL. If there is already
3607 some usage information, put ours at the end. */
3608 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3609 {
3610 rtx link;
3611
3612 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3613 link = XEXP (link, 1))
3614 ;
3615
3616 XEXP (link, 1) = call_fusage;
3617 }
3618 else
3619 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3620}
3621
23b2ce53
RS
3622/* Delete all insns made since FROM.
3623 FROM becomes the new last instruction. */
3624
3625void
502b8322 3626delete_insns_since (rtx from)
23b2ce53
RS
3627{
3628 if (from == 0)
3629 first_insn = 0;
3630 else
3631 NEXT_INSN (from) = 0;
3632 last_insn = from;
3633}
3634
5dab5552
MS
3635/* This function is deprecated, please use sequences instead.
3636
3637 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
3638 The insns to be moved are those between FROM and TO.
3639 They are moved to a new position after the insn AFTER.
3640 AFTER must not be FROM or TO or any insn in between.
3641
3642 This function does not know about SEQUENCEs and hence should not be
3643 called after delay-slot filling has been done. */
3644
3645void
502b8322 3646reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53
RS
3647{
3648 /* Splice this bunch out of where it is now. */
3649 if (PREV_INSN (from))
3650 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3651 if (NEXT_INSN (to))
3652 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3653 if (last_insn == to)
3654 last_insn = PREV_INSN (from);
3655 if (first_insn == from)
3656 first_insn = NEXT_INSN (to);
3657
3658 /* Make the new neighbors point to it and it to them. */
3659 if (NEXT_INSN (after))
3660 PREV_INSN (NEXT_INSN (after)) = to;
3661
3662 NEXT_INSN (to) = NEXT_INSN (after);
3663 PREV_INSN (from) = after;
3664 NEXT_INSN (after) = from;
3665 if (after == last_insn)
3666 last_insn = to;
3667}
3668
3c030e88
JH
3669/* Same as function above, but take care to update BB boundaries. */
3670void
502b8322 3671reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
3672{
3673 rtx prev = PREV_INSN (from);
3674 basic_block bb, bb2;
3675
3676 reorder_insns_nobb (from, to, after);
3677
ba4f7968 3678 if (GET_CODE (after) != BARRIER
3c030e88
JH
3679 && (bb = BLOCK_FOR_INSN (after)))
3680 {
3681 rtx x;
38c1593d 3682 bb->flags |= BB_DIRTY;
68252e27 3683
ba4f7968 3684 if (GET_CODE (from) != BARRIER
3c030e88
JH
3685 && (bb2 = BLOCK_FOR_INSN (from)))
3686 {
a813c111
SB
3687 if (BB_END (bb2) == to)
3688 BB_END (bb2) = prev;
38c1593d 3689 bb2->flags |= BB_DIRTY;
3c030e88
JH
3690 }
3691
a813c111
SB
3692 if (BB_END (bb) == after)
3693 BB_END (bb) = to;
3c030e88
JH
3694
3695 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3696 set_block_for_insn (x, bb);
3697 }
3698}
3699
23b2ce53
RS
3700/* Return the line note insn preceding INSN. */
3701
3702static rtx
502b8322 3703find_line_note (rtx insn)
23b2ce53
RS
3704{
3705 if (no_line_numbers)
3706 return 0;
3707
3708 for (; insn; insn = PREV_INSN (insn))
3709 if (GET_CODE (insn) == NOTE
0fb7aeda 3710 && NOTE_LINE_NUMBER (insn) >= 0)
23b2ce53
RS
3711 break;
3712
3713 return insn;
3714}
3715
3716/* Like reorder_insns, but inserts line notes to preserve the line numbers
3717 of the moved insns when debugging. This may insert a note between AFTER
3718 and FROM, and another one after TO. */
3719
3720void
502b8322 3721reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
23b2ce53
RS
3722{
3723 rtx from_line = find_line_note (from);
3724 rtx after_line = find_line_note (after);
3725
3726 reorder_insns (from, to, after);
3727
3728 if (from_line == after_line)
3729 return;
3730
3731 if (from_line)
5f2fc772 3732 emit_note_copy_after (from_line, after);
23b2ce53 3733 if (after_line)
5f2fc772 3734 emit_note_copy_after (after_line, to);
23b2ce53 3735}
aeeeda03 3736
64b59a80 3737/* Remove unnecessary notes from the instruction stream. */
aeeeda03
MM
3738
3739void
502b8322 3740remove_unnecessary_notes (void)
aeeeda03 3741{
542d73ae
RH
3742 rtx block_stack = NULL_RTX;
3743 rtx eh_stack = NULL_RTX;
aeeeda03
MM
3744 rtx insn;
3745 rtx next;
542d73ae 3746 rtx tmp;
aeeeda03 3747
116eebd6
MM
3748 /* We must not remove the first instruction in the function because
3749 the compiler depends on the first instruction being a note. */
aeeeda03
MM
3750 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3751 {
3752 /* Remember what's next. */
3753 next = NEXT_INSN (insn);
3754
3755 /* We're only interested in notes. */
3756 if (GET_CODE (insn) != NOTE)
3757 continue;
3758
542d73ae 3759 switch (NOTE_LINE_NUMBER (insn))
18c038b9 3760 {
542d73ae 3761 case NOTE_INSN_DELETED:
e803a64b 3762 case NOTE_INSN_LOOP_END_TOP_COND:
542d73ae
RH
3763 remove_insn (insn);
3764 break;
3765
3766 case NOTE_INSN_EH_REGION_BEG:
3767 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3768 break;
3769
3770 case NOTE_INSN_EH_REGION_END:
3771 /* Too many end notes. */
3772 if (eh_stack == NULL_RTX)
3773 abort ();
3774 /* Mismatched nesting. */
3775 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3776 abort ();
3777 tmp = eh_stack;
3778 eh_stack = XEXP (eh_stack, 1);
3779 free_INSN_LIST_node (tmp);
3780 break;
3781
3782 case NOTE_INSN_BLOCK_BEG:
3783 /* By now, all notes indicating lexical blocks should have
3784 NOTE_BLOCK filled in. */
3785 if (NOTE_BLOCK (insn) == NULL_TREE)
3786 abort ();
3787 block_stack = alloc_INSN_LIST (insn, block_stack);
3788 break;
3789
3790 case NOTE_INSN_BLOCK_END:
3791 /* Too many end notes. */
3792 if (block_stack == NULL_RTX)
3793 abort ();
3794 /* Mismatched nesting. */
3795 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3796 abort ();
3797 tmp = block_stack;
3798 block_stack = XEXP (block_stack, 1);
3799 free_INSN_LIST_node (tmp);
3800
18c038b9
MM
3801 /* Scan back to see if there are any non-note instructions
3802 between INSN and the beginning of this block. If not,
3803 then there is no PC range in the generated code that will
3804 actually be in this block, so there's no point in
3805 remembering the existence of the block. */
68252e27 3806 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
18c038b9
MM
3807 {
3808 /* This block contains a real instruction. Note that we
3809 don't include labels; if the only thing in the block
3810 is a label, then there are still no PC values that
3811 lie within the block. */
542d73ae 3812 if (INSN_P (tmp))
18c038b9
MM
3813 break;
3814
3815 /* We're only interested in NOTEs. */
542d73ae 3816 if (GET_CODE (tmp) != NOTE)
18c038b9
MM
3817 continue;
3818
542d73ae 3819 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
18c038b9 3820 {
e1772ac0
NB
3821 /* We just verified that this BLOCK matches us with
3822 the block_stack check above. Never delete the
3823 BLOCK for the outermost scope of the function; we
3824 can refer to names from that scope even if the
3825 block notes are messed up. */
3826 if (! is_body_block (NOTE_BLOCK (insn))
3827 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
deb5e280 3828 {
542d73ae 3829 remove_insn (tmp);
deb5e280
JM
3830 remove_insn (insn);
3831 }
18c038b9
MM
3832 break;
3833 }
542d73ae 3834 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
18c038b9
MM
3835 /* There's a nested block. We need to leave the
3836 current block in place since otherwise the debugger
3837 wouldn't be able to show symbols from our block in
3838 the nested block. */
3839 break;
3840 }
3841 }
aeeeda03 3842 }
542d73ae
RH
3843
3844 /* Too many begin notes. */
3845 if (block_stack || eh_stack)
3846 abort ();
aeeeda03
MM
3847}
3848
23b2ce53 3849\f
2f937369
DM
3850/* Emit insn(s) of given code and pattern
3851 at a specified place within the doubly-linked list.
23b2ce53 3852
2f937369
DM
3853 All of the emit_foo global entry points accept an object
3854 X which is either an insn list or a PATTERN of a single
3855 instruction.
23b2ce53 3856
2f937369
DM
3857 There are thus a few canonical ways to generate code and
3858 emit it at a specific place in the instruction stream. For
3859 example, consider the instruction named SPOT and the fact that
3860 we would like to emit some instructions before SPOT. We might
3861 do it like this:
23b2ce53 3862
2f937369
DM
3863 start_sequence ();
3864 ... emit the new instructions ...
3865 insns_head = get_insns ();
3866 end_sequence ();
23b2ce53 3867
2f937369 3868 emit_insn_before (insns_head, SPOT);
23b2ce53 3869
2f937369
DM
3870 It used to be common to generate SEQUENCE rtl instead, but that
3871 is a relic of the past which no longer occurs. The reason is that
3872 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3873 generated would almost certainly die right after it was created. */
23b2ce53 3874
2f937369 3875/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
3876
3877rtx
502b8322 3878emit_insn_before (rtx x, rtx before)
23b2ce53 3879{
2f937369 3880 rtx last = before;
b3694847 3881 rtx insn;
23b2ce53 3882
2f937369
DM
3883#ifdef ENABLE_RTL_CHECKING
3884 if (before == NULL_RTX)
3885 abort ();
3886#endif
3887
3888 if (x == NULL_RTX)
3889 return last;
3890
3891 switch (GET_CODE (x))
23b2ce53 3892 {
2f937369
DM
3893 case INSN:
3894 case JUMP_INSN:
3895 case CALL_INSN:
3896 case CODE_LABEL:
3897 case BARRIER:
3898 case NOTE:
3899 insn = x;
3900 while (insn)
3901 {
3902 rtx next = NEXT_INSN (insn);
3903 add_insn_before (insn, before);
3904 last = insn;
3905 insn = next;
3906 }
3907 break;
3908
3909#ifdef ENABLE_RTL_CHECKING
3910 case SEQUENCE:
3911 abort ();
3912 break;
3913#endif
3914
3915 default:
3916 last = make_insn_raw (x);
3917 add_insn_before (last, before);
3918 break;
23b2ce53
RS
3919 }
3920
2f937369 3921 return last;
23b2ce53
RS
3922}
3923
/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_jump_insn_before (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

#ifdef ENABLE_RTL_CHECKING
  if (before == NULL_RTX)
    abort ();
#endif

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn chain: splice each element before BEFORE.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}
3969
/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_call_insn_before (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

#ifdef ENABLE_RTL_CHECKING
  if (before == NULL_RTX)
    abort ();
#endif

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn chain: splice each element before BEFORE.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new CALL_INSN.  */
      last = make_call_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}
4015
23b2ce53 4016/* Make an insn of code BARRIER
e881bb1b 4017 and output it before the insn BEFORE. */
23b2ce53
RS
4018
4019rtx
502b8322 4020emit_barrier_before (rtx before)
23b2ce53 4021{
b3694847 4022 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4023
4024 INSN_UID (insn) = cur_insn_uid++;
4025
a0ae8e8d 4026 add_insn_before (insn, before);
23b2ce53
RS
4027 return insn;
4028}
4029
e881bb1b
RH
4030/* Emit the label LABEL before the insn BEFORE. */
4031
4032rtx
502b8322 4033emit_label_before (rtx label, rtx before)
e881bb1b
RH
4034{
4035 /* This can be called twice for the same label as a result of the
4036 confusion that follows a syntax error! So make it harmless. */
4037 if (INSN_UID (label) == 0)
4038 {
4039 INSN_UID (label) = cur_insn_uid++;
4040 add_insn_before (label, before);
4041 }
4042
4043 return label;
4044}
4045
/* Emit a note of subtype SUBTYPE before the insn BEFORE.
   Returns the new NOTE rtx.  */

rtx
emit_note_before (int subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = 0;
  NOTE_LINE_NUMBER (note) = subtype;
  /* Notes start out unattached to any basic block.  */
  BLOCK_FOR_INSN (note) = NULL;

  add_insn_before (note, before);
  return note;
}
4060\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.

   FIRST is the head of an already-built insn chain; the whole chain is
   spliced into the doubly-linked insn list right after AFTER.  Every
   non-BARRIER element is assigned to AFTER's basic block (when AFTER
   has one), and BB_END / last_insn are updated when AFTER was the tail.
   Returns the last insn of the spliced chain.  */

static rtx emit_insn_after_1 (rtx, rtx);

static rtx
emit_insn_after_1 (rtx first, rtx after)
{
  rtx last;
  rtx after_after;
  basic_block bb;

  if (GET_CODE (after) != BARRIER
      && (bb = BLOCK_FOR_INSN (after)))
    {
      bb->flags |= BB_DIRTY;
      /* The loop stops on the final element (NEXT_INSN == 0), so the
	 trailing `if' handles that last element separately.  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (GET_CODE (last) != BARRIER)
	  set_block_for_insn (last, bb);
      if (GET_CODE (last) != BARRIER)
	set_block_for_insn (last, bb);
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block bookkeeping needed; just find the tail of the chain.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  /* Splice [first..last] between AFTER and its old successor.  */
  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;
  return last;
}
4101
/* Make X be output after the insn AFTER.
   X may be a bare pattern (wrapped in a fresh INSN) or an existing insn
   chain (spliced in via emit_insn_after_1).  Returns the last insn.  */

rtx
emit_insn_after (rtx x, rtx after)
{
  rtx last = after;

#ifdef ENABLE_RTL_CHECKING
  if (after == NULL_RTX)
    abort ();
#endif

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new INSN.  */
      last = make_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}
4142
/* Similar to emit_insn_after, except that line notes are to be inserted so
   as to act as if this insn were at FROM: FROM's line note is copied in
   before the new insns and AFTER's own line note is restored after them,
   so following insns keep their original source position.  */

void
emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
{
  rtx from_line = find_line_note (from);
  rtx after_line = find_line_note (after);
  rtx insn = emit_insn_after (x, after);

  if (from_line)
    emit_note_copy_after (from_line, after);

  if (after_line)
    emit_note_copy_after (after_line, insn);
}
4159
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_jump_insn_after (rtx x, rtx after)
{
  rtx last;

#ifdef ENABLE_RTL_CHECKING
  if (after == NULL_RTX)
    abort ();
#endif

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}
4198
/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.
   If X is already an insn chain it is spliced in unchanged.
   Returns the last insn emitted.  */

rtx
emit_call_insn_after (rtx x, rtx after)
{
  rtx last;

#ifdef ENABLE_RTL_CHECKING
  if (after == NULL_RTX)
    abort ();
#endif

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new CALL_INSN.  */
      last = make_call_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}
4237
4238/* Make an insn of code BARRIER
4239 and output it after the insn AFTER. */
4240
4241rtx
502b8322 4242emit_barrier_after (rtx after)
23b2ce53 4243{
b3694847 4244 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4245
4246 INSN_UID (insn) = cur_insn_uid++;
4247
4248 add_insn_after (insn, after);
4249 return insn;
4250}
4251
4252/* Emit the label LABEL after the insn AFTER. */
4253
4254rtx
502b8322 4255emit_label_after (rtx label, rtx after)
23b2ce53
RS
4256{
4257 /* This can be called twice for the same label
4258 as a result of the confusion that follows a syntax error!
4259 So make it harmless. */
4260 if (INSN_UID (label) == 0)
4261 {
4262 INSN_UID (label) = cur_insn_uid++;
4263 add_insn_after (label, after);
4264 }
4265
4266 return label;
4267}
4268
/* Emit a note of subtype SUBTYPE after the insn AFTER.
   Returns the new NOTE rtx.  */

rtx
emit_note_after (int subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = 0;
  NOTE_LINE_NUMBER (note) = subtype;
  /* Notes start out unattached to any basic block.  */
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}
4282
/* Emit a copy of note ORIG after the insn AFTER.
   Line-number notes (NOTE_LINE_NUMBER >= 0) are suppressed when
   no_line_numbers is set; the UID is still consumed so that UIDs stay
   consistent whether or not line notes are emitted.  Returns the new
   note, or 0 if it was suppressed.  */

rtx
emit_note_copy_after (rtx orig, rtx after)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  NOTE_DATA (note) = NOTE_DATA (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}
4304\f
/* Like emit_insn_after, but set INSN_LOCATOR according to LOC for every
   active insn that was emitted (the range (AFTER, last]).  */

rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after (pattern, after);

  if (pattern == NULL_RTX)
    return last;

  /* Walk the newly emitted insns and stamp each active one.  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4325
/* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC for
   every active insn that was emitted (the range (AFTER, last]).  */

rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after (pattern, after);

  if (pattern == NULL_RTX)
    return last;

  /* Walk the newly emitted insns and stamp each active one.  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4346
/* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC for
   every active insn that was emitted (the range (AFTER, last]).  */

rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after (pattern, after);

  if (pattern == NULL_RTX)
    return last;

  /* Walk the newly emitted insns and stamp each active one.  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4367
/* Like emit_insn_before, but set INSN_LOCATOR according to LOC for every
   active insn that was emitted.  The emitted range is recovered by
   remembering PREV_INSN (before) first, since the new insns are
   inserted between it and BEFORE.  */

rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  /* Step onto the first newly emitted insn, then stamp each active one.  */
  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
4389\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.  X may be a bare pattern (wrapped in a fresh INSN) or an
   existing insn chain (appended element by element).

   Returns the last insn emitted (or the previous last insn when X is
   NULL and nothing is emitted).  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn chain: append each element in turn.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new INSN.  */
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
4436
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.
   If X is already an insn chain it is appended unchanged.
   Returns the last insn emitted.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn chain: append each element in turn.  */
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
4477
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.
   If X is already an insn chain, defer to emit_insn to append it.
   Returns the last insn emitted.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* Already an insn chain: emit_insn handles the splicing.  */
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      abort ();
      break;
#endif

    default:
      /* Bare pattern: wrap it in a new CALL_INSN.  */
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
4511
4512/* Add the label LABEL to the end of the doubly-linked list. */
4513
4514rtx
502b8322 4515emit_label (rtx label)
23b2ce53
RS
4516{
4517 /* This can be called twice for the same label
4518 as a result of the confusion that follows a syntax error!
4519 So make it harmless. */
4520 if (INSN_UID (label) == 0)
4521 {
4522 INSN_UID (label) = cur_insn_uid++;
4523 add_insn (label);
4524 }
4525 return label;
4526}
4527
4528/* Make an insn of code BARRIER
4529 and add it to the end of the doubly-linked list. */
4530
4531rtx
502b8322 4532emit_barrier (void)
23b2ce53 4533{
b3694847 4534 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4535 INSN_UID (barrier) = cur_insn_uid++;
4536 add_insn (barrier);
4537 return barrier;
4538}
4539
/* Make line numbering NOTE insn for LOCATION add it to the end
   of the doubly-linked list, but only if line-numbers are desired for
   debugging info and it doesn't match the previous one.
   Returns the new note, or NULL_RTX when the note is suppressed
   (duplicate location, or no_line_numbers set).  */

rtx
emit_line_note (location_t location)
{
  rtx note;

  set_file_and_line_for_stmt (location);

  /* Skip a note identical to the last one emitted.  */
  if (location.file && last_location.file
      && !strcmp (location.file, last_location.file)
      && location.line == last_location.line)
    return NULL_RTX;
  last_location = location;

  if (no_line_numbers)
    {
      /* Consume a UID anyway so UIDs are stable with/without notes.  */
      cur_insn_uid++;
      return NULL_RTX;
    }

  note = emit_note (location.line);
  NOTE_SOURCE_FILE (note) = location.file;

  return note;
}
4568
/* Emit a copy of note ORIG at the end of the insn list.
   Line-number notes are suppressed when no_line_numbers is set; the UID
   is still consumed.  Returns the new note, or NULL_RTX if suppressed.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}
4592
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.
   Returns the new NOTE rtx.  */

rtx
emit_note (int note_no)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = note_no;
  /* Clear the union payload so no stale data leaks into the note.  */
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
4609
/* Cause next statement to emit a line note even if the line number
   has not changed.  An impossible line number guarantees the duplicate
   check in emit_line_note will fail to match.  */

void
force_next_line_note (void)
{
  last_location.line = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first (by reusing its
   slot).  Returns the note, or NULL_RTX when the note is refused.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  if (note)
	    abort ();
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;
      break;

    default:
      break;
    }

  /* Reuse the existing note's slot rather than adding a duplicate.  */
  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  A SET of pc
   (or a PARALLEL containing one) is a jump; a CALL anywhere in the
   pattern makes it a call.  */

enum rtx_code
classify_insn (rtx x)
{
  if (GET_CODE (x) == CODE_LABEL)
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      /* Scan all elements; any CALL or SET of pc decides the class.  */
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
4698
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.
   An unconditional jump (or RETURN) is followed by a BARRIER, in which
   case the barrier is what is returned.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  if (code == CODE_LABEL)
    return emit_label (x);
  else if (code == INSN)
    return emit_insn (x);
  else if (code == JUMP_INSN)
    {
      rtx insn = emit_jump_insn (x);
      if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	return emit_barrier ();
      return insn;
    }
  else if (code == CALL_INSN)
    return emit_call_insn (x);
  else
    abort ();
}
4723\f
/* Space for free sequence stack entries.  Entries are recycled here by
   end_sequence instead of being left to the collector.  */
static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence which can be packaged in an
   RTL_EXPR.  If this sequence will contain something that might cause
   the compiler to pop arguments to function calls (because those
   pops have previously been deferred; see INHIBIT_DEFER_POP for more
   details), use do_pending_stack_adjust before calling this function.
   That will ensure that the deferred pops are not accidentally
   emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  /* Reuse a recycled stack entry when one is available.  */
  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc (sizeof (struct sequence_stack));

  /* Save the current emission state so end_sequence can restore it.  */
  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;
  tem->sequence_rtl_expr = seq_rtl_expr;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}
4758
/* Similarly, but indicate that this sequence will be placed in T, an
   RTL_EXPR.  See the documentation for start_sequence for more
   information about how to use this function.  */

void
start_sequence_for_rtl_expr (tree t)
{
  start_sequence ();

  seq_rtl_expr = t;
}
4770
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  /* Find the tail of the chain so last_insn can be set correctly.  */
  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}
4787
/* Set up the insn chain from a chain starting in FIRST to LAST.  */

void
push_to_full_sequence (rtx first, rtx last)
{
  start_sequence ();
  first_insn = first;
  last_insn = last;
  /* We really should have the end of the insn chain here.  */
  if (last && NEXT_INSN (last))
    abort ();
}
4800
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.
   The outermost saved state is the last entry on seq_stack.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  /* Walk to the bottom of the stack: that entry holds the outer-level
     chain saved by the outermost start_sequence.  */
  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
  seq_rtl_expr = top->sequence_rtl_expr;
}
4818
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  /* Find the bottom stack entry, which mirrors the outer-level chain.  */
  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;
  /* ??? Why don't we save seq_rtl_expr here?  */

  end_sequence ();
}
4836
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_rtl_expr = tem->sequence_rtl_expr;
  seq_stack = tem->next;

  /* Recycle the stack entry onto the free list for reuse by
     start_sequence; clear it first so no stale pointers survive.  */
  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
4864
/* This works like end_sequence, but records the old sequence in FIRST
   and LAST (both out-parameters).  */

void
end_full_sequence (rtx *first, rtx *last)
{
  *first = first_insn;
  *last = last_insn;
  end_sequence ();
}
4875
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
23b2ce53 4883\f
/* Put the various virtual registers into REGNO_REG_RTX (the
   x_regno_reg_rtx array of emit_status ES).  */

void
init_virtual_regs (struct emit_status *es)
{
  rtx *ptr = es->x_regno_reg_rtx;
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}
4896
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    /* Shared rtx codes are returned unchanged, not copied.  */
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;
    case CLOBBER:
      /* A clobber of a hard register is shared.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      /* Return the copy already made for this SCRATCH, if any, so all
	 references within the insn stay identical.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, RTX_HDR_SIZE);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      /* Copy the raw field first, then deep-copy the formats that
	 reference other rtx/rtvec objects.  */
      copy->u.fld[i] = orig->u.fld[i];
      switch (*format_ptr++)
	{
	case 'e':
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	  break;

	case 'E':
	case 'V':
	  /* Reuse the already-copied ASM_OPERANDS vectors so every
	     ASM_OPERANDS in the insn shares the same operand vector.  */
	  if (XVEC (orig, i) == orig_asm_constraints_vector)
	    XVEC (copy, i) = copy_asm_constraints_vector;
	  else if (XVEC (orig, i) == orig_asm_operands_vector)
	    XVEC (copy, i) = copy_asm_operands_vector;
	  else if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	    }
	  break;

	case 't':
	case 'w':
	case 'i':
	case 's':
	case 'S':
	case 'u':
	case '0':
	  /* These are left unchanged.  */
	  break;

	default:
	  abort ();
	}
    }

  if (code == SCRATCH)
    {
      /* Remember this copy so later references to the same SCRATCH
	 get the same copy.  */
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
	abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      /* Record the operand/constraint vectors so sibling ASM_OPERANDS
	 share them (see the 'E'/'V' case above).  */
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5056
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  Resets the per-insn sharing state used by copy_insn_1.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
59ec66dc 5072
/* Initialize data structures and variables in this file
   before generating rtl for each function.  Allocates the per-function
   emit_status, resets the insn chain and UID/label counters, and seeds
   regno_reg_rtx with the hard and virtual registers.  */

void
init_emit (void)
{
  struct function *f = cfun;

  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location.line = 0;
  last_location.file = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
			 * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5142
ff88fe10 5143/* Generate the constant 0. */
69ef87e2
AH
5144
5145static rtx
502b8322 5146gen_const_vector_0 (enum machine_mode mode)
69ef87e2
AH
5147{
5148 rtx tem;
5149 rtvec v;
5150 int units, i;
5151 enum machine_mode inner;
5152
5153 units = GET_MODE_NUNITS (mode);
5154 inner = GET_MODE_INNER (mode);
5155
5156 v = rtvec_alloc (units);
5157
5158 /* We need to call this function after we to set CONST0_RTX first. */
5159 if (!CONST0_RTX (inner))
5160 abort ();
5161
5162 for (i = 0; i < units; ++i)
5163 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5164
a06e3c40 5165 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5166 return tem;
5167}
5168
a06e3c40
R
5169/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5170 all elements are zero. */
5171rtx
502b8322 5172gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40
R
5173{
5174 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5175 int i;
5176
5177 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5178 if (RTVEC_ELT (v, i) != inner_zero)
5179 return gen_rtx_raw_CONST_VECTOR (mode, v);
5180 return CONST0_RTX (mode);
5181}
5182
23b2ce53
RS
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.
   Called exactly once per compiler run; the order of the steps below
   matters (e.g. const_int_rtx must be filled before GEN_INT is used,
   and reg_raw_mode before static_regno_reg_rtx).  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  /* Pick the narrowest integer modes matching a unit and a word.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  /* Likewise the narrowest float mode matching DOUBLE_TYPE_SIZE.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  /* INIT_EXPANDERS (or target code) may have created these already;
     don't clobber them.  */
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* Reuse a cached CONST_INT for const_true_rtx when possible.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* Set up the commonly used REAL_VALUE_TYPE constants.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  /* 0.5 is 1.0 with the exponent dropped by one.  */
  dconsthalf = dconst1;
  dconsthalf.exp--;

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be given to at least 160 bits precision.  */
  real_from_string (&dconstpi,
		    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
		    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  /* Fill const_tiny_rtx[i][mode] with the constant I in each mode,
     for I in {0, 1, 2}.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Zero vectors for all vector modes (only the zero is shared).  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  /* Condition-code modes share const0_rtx as their zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

  /* A target-supplied STATIC_CHAIN rtx overrides the regnum-based
     defaults set up just above.  */
#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
a11759a3
JR
5379\f
5380/* Query and clear/ restore no_line_numbers. This is used by the
5381 switch / case handling in stmt.c to give proper line numbers in
5382 warnings about unreachable code. */
5383
5384int
502b8322 5385force_line_numbers (void)
a11759a3
JR
5386{
5387 int old = no_line_numbers;
5388
5389 no_line_numbers = 0;
5390 if (old)
5391 force_next_line_note ();
5392 return old;
5393}
5394
/* Restore no_line_numbers to OLD_VALUE, the value previously returned
   by force_line_numbers.  */
void
restore_line_number_status (int old_value)
{
  no_line_numbers = old_value;
}
969d70ca
JH
5400
/* Produce an exact duplicate of insn INSN and emit it after AFTER,
   taking care to update any enclosing libcall region (REG_RETVAL /
   REG_LIBCALL notes) so that it encloses the copy.  Returns the new
   insn.  Aborts on insn codes other than INSN, JUMP_INSN and
   CALL_INSN.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  /* Emit the copied pattern with the emitter matching INSN's kind.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      /* Carry over the call-specific flag bits.  */
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	/* Preserve each note's list flavor (EXPR_LIST vs INSN_LIST)
	   while prepending it to the new insn's note chain.  */
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  If the copy ends a libcall region,
     walk back to the insn carrying the REG_LIBCALL note and make the
     two notes point at each other's insns.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  /* The copy recognizes as the same insn code as the original.  */
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
e2500fed 5467
3e89ed8d
JH
5468static GTY((deletable(""))) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5469rtx
5470gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5471{
5472 if (hard_reg_clobbers[mode][regno])
5473 return hard_reg_clobbers[mode][regno];
5474 else
5475 return (hard_reg_clobbers[mode][regno] =
5476 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5477}
5478
e2500fed 5479#include "gt-emit-rtl.h"