/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_last_label_num was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector_0 (enum machine_mode);
static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
          && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

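/* An implication of the hash-consing above: two MEMs with identical
   attributes share a single mem_attrs record, so attribute equality can
   be tested by comparing MEM_ATTRS pointers instead of walking the
   individual fields.  */
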
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for a register described by DECL
   and OFFSET, and insert it into the hash table if one identical to it
   is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
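
/* A consequence of the sharing above is that pointer equality is a valid
   equality test for CONST_INTs: for example, GEN_INT (0) always returns
   the preallocated const0_rtx, and two calls with the same value always
   yield the same rtx.  */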

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
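
/* For instance, gen_int_mode (0xff, QImode) truncates the constant to
   the 8-bit mode and sign-extends the result, yielding (const_int -1),
   i.e. constm1_rtx; this keeps CONST_INTs in the canonical sign-extended
   form the rest of the compiler expects.  */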

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
          /* We can get a 0 for an error mark.  */
          && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
          && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
        abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
        /* We cannot represent this value as a constant.  */
        abort ();

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
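
/* A worked example, assuming a host with a 32-bit HOST_WIDE_INT:
   immed_double_const (0, 1, DImode) denotes the value 1 << 32; it does
   not fit in one host word, so a VOIDmode CONST_DOUBLE with low word 0
   and high word 1 is returned.  immed_double_const (5, 0, DImode) fits
   in one word and simply yields (const_int 5).  */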

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
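
/* For example, for a DImode pseudo X on a little-endian target,
   gen_lowpart_SUBREG (SImode, X) yields (subreg:SI (reg:DI X) 0); on a
   big-endian target with 4-byte words the byte offset would be 4.  */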
\f
/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
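
/* A typical use is building the body of a multi-set insn, e.g.

     gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1))

   where SET0 and SET1 are previously constructed SET rtxs.  */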
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
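
/* Note that because of the CONCAT case above, callers cannot assume the
   result is always a REG: with generating_concat_p set,
   gen_reg_rtx (DCmode) yields (concat:DC (reg:DF i) (reg:DF j)) built
   from two independent DFmode pseudos.  */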

/* Generate a register with the same attributes as REG, but offset by
   OFFSET.  Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between,
     or some part of the variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we did not do the big endian correction, the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal to or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
          && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
        offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
        offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
        offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                      % UNITS_PER_WORD);
      else
        offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
        {
          /* MODE is wider than the variable so the new reg will cover
             the whole variable so the resulting OFFSET should be 0.  */
          offset = 0;
        }
      else
        {
          /* Convert little endian to machine endian WRT size of variable.  */
          if (WORDS_BIG_ENDIAN)
            offset = ((var_size - 1 - offset_le)
                      / UNITS_PER_WORD) * UNITS_PER_WORD;
          else
            offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

          if (BYTES_BIG_ENDIAN)
            offset += ((var_size - 1 - offset_le)
                       % UNITS_PER_WORD);
          else
            offset += offset_le % UNITS_PER_WORD;
        }
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes for REG from the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (REG_P (reg))
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || !REG_P (reg))
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
                                      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  if (innermode == VOIDmode || innermode == BLKmode)
    abort ();

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
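
/* Two illustrative cases: for a DImode pseudo R,
   gen_lowpart_common (SImode, R) yields (subreg:SI (reg:DI R) O), with O
   the lowpart byte offset for the target's endianness, while for
   (zero_extend:DI (reg:SI S)) it simply returns (reg:SI S).  */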
\f
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or imaginary component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
        {
          part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (TREE_CODE (part) == REAL_CST
              || TREE_CODE (part) == INTEGER_CST)
            return expand_expr (part, NULL_RTX, mode, 0);
        }
    }
  return NULL_RTX;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
           && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
           && REG_P (x)
           && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && MEM_P (result))
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
        abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
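
/* For example, when extracting QImode from SImode with 4-byte words the
   difference is 3 bytes: subreg_lowpart_offset returns 0 on a
   little-endian target and 3 on a big-endian one, and
   subreg_highpart_offset returns the mirror image, 3 and 0 respectively.  */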

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
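
/* For instance, with 4-byte words, operand_subword (op, 1, 1, DImode)
   requests the second word of a DImode operand; for a MEM this becomes a
   word_mode MEM at the original address plus 4 bytes.  */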

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (rtx insn)
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
                                 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
        SET_SRC (body) = new;
      else
        SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
                   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPR expressions EXPR1 and EXPR2 can be
   considered equal, and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                        TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
                           TREE_OPERAND (expr2, 1));

  if (TREE_CODE (expr1) == INDIRECT_REF)
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                             TREE_OPERAND (expr2, 0));

  /* Decls with different pointers can't be equal.  */
  if (DECL_P (expr1))
    return 0;

  abort ();  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
                have been resolved here.  */
}

c6259b83 1501/* Given REF, a MEM, and T, either the type of X or the expression
1502 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1503 if we are making a new object of this type. BITPOS is nonzero if
1504 there is an offset outstanding on T that will be applied later. */
c6259b83 1505
1506void
35cb5232 1507set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1508 HOST_WIDE_INT bitpos)
c6259b83 1509{
2a631e19 1510 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
b10dbbca 1511 tree expr = MEM_EXPR (ref);
2a631e19 1512 rtx offset = MEM_OFFSET (ref);
1513 rtx size = MEM_SIZE (ref);
1514 unsigned int align = MEM_ALIGN (ref);
6f717f77 1515 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1516 tree type;
1517
1518 /* It can happen that type_for_mode was given a mode for which there
1519 is no language-level type. In which case it returns NULL, which
1520 we can see here. */
1521 if (t == NULL_TREE)
1522 return;
1523
1524 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1525 if (type == error_mark_node)
1526 return;
c6259b83 1527
c6259b83 1528 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1529 wrong answer, as it assumes that DECL_RTL already has the right alias
1530 info. Callers should not set DECL_RTL until after the call to
1531 set_mem_attributes. */
1532 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1533 abort ();
1534
96216d37 1535 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1536 front-end routine) and use it. */
1537 alias = get_alias_set (t);
c6259b83 1538
fbc6244b 1539 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
c6259b83 1540 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
b8098e5b 1541 RTX_UNCHANGING_P (ref)
278fe152 1542 |= ((lang_hooks.honor_readonly
66d12a6c 1543 && (TYPE_READONLY (type) || (t != type && TREE_READONLY (t))))
278fe152 1544 || (! TYPE_P (t) && TREE_CONSTANT (t)));
8d350e69 1545 MEM_POINTER (ref) = POINTER_TYPE_P (type);
010d0641 1546 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
c6259b83 1547
2a631e19 1548 /* If we are making an object of this type, or if this is a DECL, we know
1549 that it is a scalar if the type is not an aggregate. */
1550 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
c6259b83 1551 MEM_SCALAR_P (ref) = 1;
1552
a9d9ab08 1553 /* We can set the alignment from the type if we are making an object,
1554 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1555 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1556 align = MAX (align, TYPE_ALIGN (type));
ecfe4ca9 1557
96216d37 1558 /* If the size is known, we can set that. */
1559 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
2a631e19 1560 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
96216d37 1561
579bccf9 1562 /* If T is not a type, we may be able to deduce some more information about
1563 the expression. */
1564 if (! TYPE_P (t))
2a631e19 1565 {
1566 maybe_set_unchanging (ref, t);
1567 if (TREE_THIS_VOLATILE (t))
1568 MEM_VOLATILE_P (ref) = 1;
c6259b83 1569
3c00f11c 1570 /* Now remove any conversions: they don't change what the underlying
1571 object is. Likewise for SAVE_EXPR. */
2a631e19 1572 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
3c00f11c 1573 || TREE_CODE (t) == NON_LVALUE_EXPR
1574 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1575 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1576 t = TREE_OPERAND (t, 0);
1577
5cc193e7 1578 /* If this expression can't be addressed (e.g., it contains a reference
1579 to a non-addressable field), show we don't change its alias set. */
1580 if (! can_address_p (t))
1581 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1582
2a631e19 1583 /* If this is a decl, set the attributes of the MEM from it. */
1584 if (DECL_P (t))
1585 {
b10dbbca 1586 expr = t;
1587 offset = const0_rtx;
6f717f77 1588 apply_bitpos = bitpos;
2a631e19 1589 size = (DECL_SIZE_UNIT (t)
1590 && host_integerp (DECL_SIZE_UNIT (t), 1)
1591 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
d4c5e26d 1592 align = DECL_ALIGN (t);
2a631e19 1593 }
1594
ecfe4ca9 1595 /* If this is a constant, we know the alignment. */
42f6f447 1596 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1597 {
1598 align = TYPE_ALIGN (type);
1599#ifdef CONSTANT_ALIGNMENT
1600 align = CONSTANT_ALIGNMENT (t, align);
1601#endif
1602 }
b10dbbca 1603
1604 /* If this is a field reference and not a bit-field, record it. */
 1605   /* ??? There is some information that can be gleaned from bit-fields,
1606 such as the word offset in the structure that might be modified.
1607 But skip it for now. */
1608 else if (TREE_CODE (t) == COMPONENT_REF
1609 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1610 {
1611 expr = component_ref_for_mem_expr (t);
1612 offset = const0_rtx;
6f717f77 1613 apply_bitpos = bitpos;
b10dbbca 1614 /* ??? Any reason the field size would be different than
1615 the size we got from the type? */
1616 }
1617
1618 /* If this is an array reference, look for an outer field reference. */
1619 else if (TREE_CODE (t) == ARRAY_REF)
1620 {
1621 tree off_tree = size_zero_node;
6b039979 1622 /* We can't modify t, because we use it at the end of the
1623 function. */
1624 tree t2 = t;
b10dbbca 1625
1626 do
1627 {
6b039979 1628 tree index = TREE_OPERAND (t2, 1);
6374121b 1629 tree low_bound = array_ref_low_bound (t2);
1630 tree unit_size = array_ref_element_size (t2);
97f8ce30 1631
1632 /* We assume all arrays have sizes that are a multiple of a byte.
1633 First subtract the lower bound, if any, in the type of the
6374121b 1634 index, then convert to sizetype and multiply by the size of
1635 the array element. */
1636 if (! integer_zerop (low_bound))
b55f9493 1637 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
1638 index, low_bound));
97f8ce30 1639
6374121b 1640 off_tree = size_binop (PLUS_EXPR,
1641 size_binop (MULT_EXPR, convert (sizetype,
1642 index),
1643 unit_size),
1644 off_tree);
6b039979 1645 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1646 }
6b039979 1647 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1648
6b039979 1649 if (DECL_P (t2))
2d8fe5d0 1650 {
6b039979 1651 expr = t2;
0318dc09 1652 offset = NULL;
2d8fe5d0 1653 if (host_integerp (off_tree, 1))
0318dc09 1654 {
1655 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1656 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
6b039979 1657 align = DECL_ALIGN (t2);
3473aefe 1658 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
0318dc09 1659 align = aoff;
1660 offset = GEN_INT (ioff);
6f717f77 1661 apply_bitpos = bitpos;
0318dc09 1662 }
2d8fe5d0 1663 }
6b039979 1664 else if (TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1665 {
6b039979 1666 expr = component_ref_for_mem_expr (t2);
b10dbbca 1667 if (host_integerp (off_tree, 1))
6f717f77 1668 {
1669 offset = GEN_INT (tree_low_cst (off_tree, 1));
1670 apply_bitpos = bitpos;
1671 }
b10dbbca 1672 /* ??? Any reason the field size would be different than
1673 the size we got from the type? */
1674 }
2d8fe5d0 1675 else if (flag_argument_noalias > 1
6b039979 1676 && TREE_CODE (t2) == INDIRECT_REF
1677 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
2d8fe5d0 1678 {
6b039979 1679 expr = t2;
2d8fe5d0 1680 offset = NULL;
1681 }
1682 }
1683
1684 /* If this is a Fortran indirect argument reference, record the
1685 parameter decl. */
1686 else if (flag_argument_noalias > 1
1687 && TREE_CODE (t) == INDIRECT_REF
1688 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1689 {
1690 expr = t;
1691 offset = NULL;
b10dbbca 1692 }
2a631e19 1693 }
1694
e2e205b3 1695 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1696 bit position offset. Similarly, increase the size of the accessed
1697 object to contain the negative offset. */
6f717f77 1698 if (apply_bitpos)
595f1461 1699 {
1700 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1701 if (size)
1702 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1703 }
6f717f77 1704
2a631e19 1705 /* Now set the attributes we computed above. */
5cc193e7 1706 MEM_ATTRS (ref)
b10dbbca 1707 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2a631e19 1708
1709 /* If this is already known to be a scalar or aggregate, we are done. */
1710 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
96216d37 1711 return;
1712
2a631e19 1713 /* If it is a reference into an aggregate, this is part of an aggregate.
1714 Otherwise we don't know. */
c6259b83 1715 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1716 || TREE_CODE (t) == ARRAY_RANGE_REF
1717 || TREE_CODE (t) == BIT_FIELD_REF)
1718 MEM_IN_STRUCT_P (ref) = 1;
1719}
1720
6f717f77 1721void
35cb5232 1722set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1723{
1724 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1725}
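
/* Illustrative sketch, not part of the original source: a typical caller
   builds the MEM first and only then attaches the tree-level attributes.
   DECL and ADDR below are hypothetical values supplied by the expander.

       rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
       set_mem_attributes (mem, decl, 1);

   Passing OBJECTP as 1 asserts the MEM is the object itself, which lets
   the type contribute alignment and the scalar/aggregate bits.  */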
1726
ca74b940 1727/* Set the expr and offset of MEM from the REG_EXPR and REG_OFFSET of REG.  */
1728
1729void
35cb5232 1730set_mem_attrs_from_reg (rtx mem, rtx reg)
ca74b940 1731{
1732 MEM_ATTRS (mem)
1733 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1734 GEN_INT (REG_OFFSET (reg)),
1735 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1736}
1737
c6259b83 1738/* Set the alias set of MEM to SET. */
1739
1740void
35cb5232 1741set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
c6259b83 1742{
d4c5e26d 1743#ifdef ENABLE_CHECKING
c6259b83 1744 /* If the new and old alias sets don't conflict, something is wrong. */
1745 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1746 abort ();
c6259b83 1747#endif
1748
b10dbbca 1749 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
5cc193e7 1750 MEM_SIZE (mem), MEM_ALIGN (mem),
1751 GET_MODE (mem));
c6259b83 1752}
96216d37 1753
1c4512da 1754/* Set the alignment of MEM to ALIGN bits. */
96216d37 1755
1756void
35cb5232 1757set_mem_align (rtx mem, unsigned int align)
96216d37 1758{
b10dbbca 1759 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
5cc193e7 1760 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1761 GET_MODE (mem));
96216d37 1762}
278fe152 1763
b10dbbca 1764/* Set the expr for MEM to EXPR. */
278fe152 1765
1766void
35cb5232 1767set_mem_expr (rtx mem, tree expr)
278fe152 1768{
1769 MEM_ATTRS (mem)
b10dbbca 1770 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
278fe152 1771 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1772}
b10dbbca 1773
1774/* Set the offset of MEM to OFFSET. */
1775
1776void
35cb5232 1777set_mem_offset (rtx mem, rtx offset)
b10dbbca 1778{
1779 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1780 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1781 GET_MODE (mem));
f0500469 1782}
1783
1784/* Set the size of MEM to SIZE. */
1785
1786void
35cb5232 1787set_mem_size (rtx mem, rtx size)
f0500469 1788{
1789 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1790 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1791 GET_MODE (mem));
b10dbbca 1792}
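
/* Illustrative sketch (values hypothetical): since MEM_ATTRS blocks are
   shared and hashed, each setter above constructs a fresh attribute block
   via get_mem_attrs instead of mutating one in place.  Recording that MEM
   is 8-byte aligned and 4 bytes wide would look like:

       set_mem_align (mem, 64);
       set_mem_size (mem, GEN_INT (4));

   Note that alignment is given in bits while size is in bytes.  */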
c6259b83 1793\f
96216d37 1794/* Return a memory reference like MEMREF, but with its mode changed to MODE
1795 and its address changed to ADDR. (VOIDmode means don't change the mode.
1796 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1797 returned memory location is required to be valid. The memory
1798 attributes are not changed. */
15bbde2b 1799
96216d37 1800static rtx
35cb5232 1801change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 1802{
1803 rtx new;
1804
e16ceb8e 1805 if (!MEM_P (memref))
15bbde2b 1806 abort ();
1807 if (mode == VOIDmode)
1808 mode = GET_MODE (memref);
1809 if (addr == 0)
1810 addr = XEXP (memref, 0);
3988ef8b 1811 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1812 && (!validate || memory_address_p (mode, addr)))
1813 return memref;
15bbde2b 1814
e4e86ec5 1815 if (validate)
15bbde2b 1816 {
e4e86ec5 1817 if (reload_in_progress || reload_completed)
1818 {
1819 if (! memory_address_p (mode, addr))
1820 abort ();
1821 }
1822 else
1823 addr = memory_address (mode, addr);
15bbde2b 1824 }
d823ba47 1825
e8976cd7 1826 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1827 return memref;
1828
3ad7bb1c 1829 new = gen_rtx_MEM (mode, addr);
6a0934dd 1830 MEM_COPY_ATTRIBUTES (new, memref);
15bbde2b 1831 return new;
1832}
537ffcfc 1833
96216d37 1834/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1835 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 1836
1837rtx
35cb5232 1838change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 1839{
0ab96142 1840 rtx new = change_address_1 (memref, mode, addr, 1), size;
96216d37 1841 enum machine_mode mmode = GET_MODE (new);
0ab96142 1842 unsigned int align;
1843
1844 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1845 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
6cc60c4d 1846
d28edf0d 1847 /* If there are no changes, just return the original memory reference. */
1848 if (new == memref)
0ab96142 1849 {
1850 if (MEM_ATTRS (memref) == 0
1851 || (MEM_EXPR (memref) == NULL
1852 && MEM_OFFSET (memref) == NULL
1853 && MEM_SIZE (memref) == size
1854 && MEM_ALIGN (memref) == align))
1855 return new;
1856
6c231ace 1857 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
0ab96142 1858 MEM_COPY_ATTRIBUTES (new, memref);
1859 }
d28edf0d 1860
96216d37 1861 MEM_ATTRS (new)
0ab96142 1862 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
fb257ae6 1863
96216d37 1864 return new;
e513d163 1865}
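
/* Illustrative sketch, not from the original source: change_address is the
   entry point to use when the new address may reference different memory,
   since only the alias set is kept.  With MEM and REG hypothetical:

       rtx new_mem = change_address (mem, VOIDmode, reg);

   VOIDmode here means "keep the mode of MEM".  */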
537ffcfc 1866
96216d37 1867/* Return a memory reference like MEMREF, but with its mode changed
1868 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 1869 nonzero, the memory address is forced to be valid.
1870 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
 1871   and the caller is responsible for adjusting the MEMREF base register.  */
e4e86ec5 1872
1873rtx
35cb5232 1874adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1875 int validate, int adjust)
e4e86ec5 1876{
fb257ae6 1877 rtx addr = XEXP (memref, 0);
96216d37 1878 rtx new;
1879 rtx memoffset = MEM_OFFSET (memref);
5cc193e7 1880 rtx size = 0;
96216d37 1881 unsigned int memalign = MEM_ALIGN (memref);
fb257ae6 1882
d28edf0d 1883 /* If there are no changes, just return the original memory reference. */
1884 if (mode == GET_MODE (memref) && !offset
1885 && (!validate || memory_address_p (mode, addr)))
1886 return memref;
1887
e36c3d58 1888 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 1889 This may happen even if offset is nonzero -- consider
e36c3d58 1890 (plus (plus reg reg) const_int) -- so do this always. */
1891 addr = copy_rtx (addr);
1892
cd358719 1893 if (adjust)
1894 {
1895 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1896 object, we can merge it into the LO_SUM. */
1897 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1898 && offset >= 0
1899 && (unsigned HOST_WIDE_INT) offset
1900 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1901 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1902 plus_constant (XEXP (addr, 1), offset));
1903 else
1904 addr = plus_constant (addr, offset);
1905 }
fb257ae6 1906
96216d37 1907 new = change_address_1 (memref, mode, addr, validate);
1908
1909 /* Compute the new values of the memory attributes due to this adjustment.
1910 We add the offsets and update the alignment. */
1911 if (memoffset)
1912 memoffset = GEN_INT (offset + INTVAL (memoffset));
1913
b8098e5b 1914 /* Compute the new alignment by taking the MIN of the alignment and the
 1915      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 1916      is zero.  */
1917 if (offset != 0)
f4aee538 1918 memalign
1919 = MIN (memalign,
1920 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
96216d37 1921
5cc193e7 1922 /* We can compute the size in a number of ways. */
2b96c5f6 1923 if (GET_MODE (new) != BLKmode)
1924 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
5cc193e7 1925 else if (MEM_SIZE (memref))
1926 size = plus_constant (MEM_SIZE (memref), -offset);
1927
b10dbbca 1928 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
5cc193e7 1929 memoffset, size, memalign, GET_MODE (new));
96216d37 1930
1931 /* At some point, we should validate that this offset is within the object,
1932 if all the appropriate values are known. */
1933 return new;
e4e86ec5 1934}
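
/* Illustrative sketch: callers normally reach this routine through the
   adjust_address macro, which passes VALIDATE = ADJUST = 1.  Accessing the
   second halfword of a hypothetical SImode MEM as HImode would be:

       rtx hi = adjust_address (mem, HImode, 2);

   The offset, size and alignment attributes are updated to match.  */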
1935
bf42c62d 1936/* Return a memory reference like MEMREF, but with its mode changed
1937 to MODE and its address changed to ADDR, which is assumed to be
1938 MEMREF offseted by OFFSET bytes. If VALIDATE is
1939 nonzero, the memory address is forced to be valid. */
1940
1941rtx
35cb5232 1942adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1943 HOST_WIDE_INT offset, int validate)
bf42c62d 1944{
1945 memref = change_address_1 (memref, VOIDmode, addr, validate);
1946 return adjust_address_1 (memref, mode, offset, validate, 0);
1947}
1948
2a631e19 1949/* Return a memory reference like MEMREF, but whose address is changed by
1950 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1951 known to be in OFFSET (possibly 1). */
fcdc122e 1952
1953rtx
35cb5232 1954offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 1955{
fac6aae6 1956 rtx new, addr = XEXP (memref, 0);
1957
1958 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1959
d4c5e26d 1960 /* At this point we don't know _why_ the address is invalid. It
917bbcab 1961 could have secondary memory references, multiplies or anything.
fac6aae6 1962
1963 However, if we did go and rearrange things, we can wind up not
1964 being able to recognize the magic around pic_offset_table_rtx.
1965 This stuff is fragile, and is yet another example of why it is
1966 bad to expose PIC machinery too early. */
1967 if (! memory_address_p (GET_MODE (memref), new)
1968 && GET_CODE (addr) == PLUS
1969 && XEXP (addr, 0) == pic_offset_table_rtx)
1970 {
1971 addr = force_reg (GET_MODE (addr), addr);
1972 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1973 }
1974
430816ab 1975 update_temp_slot_address (XEXP (memref, 0), new);
fac6aae6 1976 new = change_address_1 (memref, VOIDmode, new, 1);
fcdc122e 1977
d28edf0d 1978 /* If there are no changes, just return the original memory reference. */
1979 if (new == memref)
1980 return new;
1981
fcdc122e 1982 /* Update the alignment to reflect the offset. Reset the offset, which
1983 we don't know. */
80fabb90 1984 MEM_ATTRS (new)
1985 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
84130727 1986 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
80fabb90 1987 GET_MODE (new));
fcdc122e 1988 return new;
1989}
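
/* Illustrative sketch (names hypothetical): offset_address handles variable
   offsets.  Indexing a BLKmode table MEM by register IDX scaled by 4, where
   4 is the known power-of-two factor of the offset:

       rtx off = gen_rtx_MULT (Pmode, idx, GEN_INT (4));
       rtx elt = offset_address (mem, off, 4);
 */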
d4c5e26d 1990
537ffcfc 1991/* Return a memory reference like MEMREF, but with its address changed to
1992 ADDR. The caller is asserting that the actual piece of memory pointed
1993 to is the same, just the form of the address is being changed, such as
1994 by putting something into a register. */
1995
1996rtx
35cb5232 1997replace_equiv_address (rtx memref, rtx addr)
537ffcfc 1998{
96216d37 1999 /* change_address_1 copies the memory attribute structure without change
2000 and that's exactly what we want here. */
ecfe4ca9 2001 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2002 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2003}
96216d37 2004
e4e86ec5 2005/* Likewise, but the reference is not required to be valid. */
2006
2007rtx
35cb5232 2008replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2009{
e4e86ec5 2010 return change_address_1 (memref, VOIDmode, addr, 0);
2011}
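
/* Illustrative sketch: a common use of replace_equiv_address is forcing an
   address into a register while asserting that the same memory is still
   referenced, so all attributes can be kept.  MEM is hypothetical:

       rtx addr = force_reg (Pmode, XEXP (mem, 0));
       mem = replace_equiv_address (mem, addr);
 */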
8259ab07 2012
2013/* Return a memory reference like MEMREF, but with its mode widened to
2014 MODE and offset by OFFSET. This would be used by targets that e.g.
2015 cannot issue QImode memory operations and have to use SImode memory
2016 operations plus masking logic. */
2017
2018rtx
35cb5232 2019widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2020{
2021 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2022 tree expr = MEM_EXPR (new);
2023 rtx memoffset = MEM_OFFSET (new);
2024 unsigned int size = GET_MODE_SIZE (mode);
2025
d28edf0d 2026 /* If there are no changes, just return the original memory reference. */
2027 if (new == memref)
2028 return new;
2029
8259ab07 2030 /* If we don't know what offset we were at within the expression, then
2031 we can't know if we've overstepped the bounds. */
22ee087b 2032 if (! memoffset)
8259ab07 2033 expr = NULL_TREE;
2034
2035 while (expr)
2036 {
2037 if (TREE_CODE (expr) == COMPONENT_REF)
2038 {
2039 tree field = TREE_OPERAND (expr, 1);
6374121b 2040 tree offset = component_ref_field_offset (expr);
8259ab07 2041
2042 if (! DECL_SIZE_UNIT (field))
2043 {
2044 expr = NULL_TREE;
2045 break;
2046 }
2047
2048 /* Is the field at least as large as the access? If so, ok,
2049 otherwise strip back to the containing structure. */
8359cfb4 2050 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2051 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
8259ab07 2052 && INTVAL (memoffset) >= 0)
2053 break;
2054
6374121b 2055 if (! host_integerp (offset, 1))
8259ab07 2056 {
2057 expr = NULL_TREE;
2058 break;
2059 }
2060
2061 expr = TREE_OPERAND (expr, 0);
6374121b 2062 memoffset
2063 = (GEN_INT (INTVAL (memoffset)
2064 + tree_low_cst (offset, 1)
2065 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2066 / BITS_PER_UNIT)));
8259ab07 2067 }
2068 /* Similarly for the decl. */
2069 else if (DECL_P (expr)
2070 && DECL_SIZE_UNIT (expr)
40c4e66e 2071 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
8259ab07 2072 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2073 && (! memoffset || INTVAL (memoffset) >= 0))
2074 break;
2075 else
2076 {
2077 /* The widened memory access overflows the expression, which means
2078 that it could alias another expression. Zap it. */
2079 expr = NULL_TREE;
2080 break;
2081 }
2082 }
2083
2084 if (! expr)
2085 memoffset = NULL_RTX;
2086
2087 /* The widened memory may alias other stuff, so zap the alias set. */
2088 /* ??? Maybe use get_alias_set on any remaining expression. */
2089
2090 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2091 MEM_ALIGN (new), mode);
2092
2093 return new;
2094}
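
/* Illustrative sketch (hypothetical target scenario): a port without byte
   loads could widen a QImode reference BYTE_MEM to the containing word and
   extract the byte with shifts and masks afterwards:

       rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The attribute trimming above guarantees the wider MEM never claims a
   too-narrow extent or a stale alias set.  */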
15bbde2b 2095\f
2096/* Return a newly created CODE_LABEL rtx with a unique label number. */
2097
2098rtx
35cb5232 2099gen_label_rtx (void)
15bbde2b 2100{
a7ae1e59 2101 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2102 NULL, label_num++, NULL);
15bbde2b 2103}
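
/* Illustrative sketch: the label returned here is not yet in the insn
   stream; it must still be placed with emit_label before anything can
   branch to it:

       rtx label = gen_label_rtx ();
       ...
       emit_label (label);
 */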
2104\f
2105/* For procedure integration. */
2106
15bbde2b 2107/* Install new pointers to the first and last insns in the chain.
d4c332ff 2108 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2109 Used for an inline-procedure after copying the insn chain. */
2110
2111void
35cb5232 2112set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2113{
d4c332ff 2114 rtx insn;
2115
15bbde2b 2116 first_insn = first;
2117 last_insn = last;
d4c332ff 2118 cur_insn_uid = 0;
2119
2120 for (insn = first; insn; insn = NEXT_INSN (insn))
2121 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2122
2123 cur_insn_uid++;
15bbde2b 2124}
2125
0a893c29 2126/* Set the last label number found in the current function.
2127 This is used when belatedly compiling an inline function. */
15bbde2b 2128
2129void
35cb5232 2130set_new_last_label_num (int last)
15bbde2b 2131{
0a893c29 2132 base_label_num = label_num;
2133 last_label_num = last;
15bbde2b 2134}
0a893c29 2135\f
15bbde2b 2136/* Restore all variables describing the current status from the structure *P.
2137 This is used after a nested function. */
2138
2139void
35cb5232 2140restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
15bbde2b 2141{
bb9d6298 2142 last_label_num = 0;
15bbde2b 2143}
2144\f
d823ba47 2145/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2146 structure. This routine should only be called once. */
15bbde2b 2147
a40c0eeb 2148static void
2149unshare_all_rtl_1 (tree fndecl, rtx insn)
15bbde2b 2150{
2d96a59a 2151 tree decl;
15bbde2b 2152
2d96a59a 2153 /* Make sure that virtual parameters are not shared. */
2154 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
0e8e37b2 2155 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2d96a59a 2156
72ec8878 2157 /* Make sure that virtual stack slots are not shared. */
2158 unshare_all_decls (DECL_INITIAL (fndecl));
2159
2d96a59a 2160 /* Unshare just about everything else. */
1cd4cfea 2161 unshare_all_rtl_in_chain (insn);
d823ba47 2162
15bbde2b 2163 /* Make sure the addresses of stack slots found outside the insn chain
2164 (such as, in DECL_RTL of a variable) are not shared
2165 with the insn chain.
2166
2167 This special care is necessary when the stack slot MEM does not
2168 actually appear in the insn chain. If it does appear, its address
2169 is unshared from all else at that point. */
45733446 2170 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2171}
2172
d823ba47 2173/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2174 structure, again. This is a fairly expensive thing to do so it
2175 should be done sparingly. */
2176
2177void
35cb5232 2178unshare_all_rtl_again (rtx insn)
2d96a59a 2179{
2180 rtx p;
5244079b 2181 tree decl;
2182
2d96a59a 2183 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2184 if (INSN_P (p))
2d96a59a 2185 {
2186 reset_used_flags (PATTERN (p));
2187 reset_used_flags (REG_NOTES (p));
2188 reset_used_flags (LOG_LINKS (p));
2189 }
5244079b 2190
01dc9f0c 2191 /* Make sure that virtual stack slots are not shared. */
2192 reset_used_decls (DECL_INITIAL (cfun->decl));
2193
5244079b 2194 /* Make sure that virtual parameters are not shared. */
2195 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2196 reset_used_flags (DECL_RTL (decl));
2197
2198 reset_used_flags (stack_slot_list);
2199
a40c0eeb 2200 unshare_all_rtl_1 (cfun->decl, insn);
2201}
2202
2203void
2204unshare_all_rtl (void)
2205{
2206 unshare_all_rtl_1 (current_function_decl, get_insns ());
2d96a59a 2207}
2208
1cd4cfea 2209/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
 2210   Recursively does the same for subexpressions.  */
2211
2212static void
2213verify_rtx_sharing (rtx orig, rtx insn)
2214{
2215 rtx x = orig;
2216 int i;
2217 enum rtx_code code;
2218 const char *format_ptr;
2219
2220 if (x == 0)
2221 return;
2222
2223 code = GET_CODE (x);
2224
2225 /* These types may be freely shared. */
2226
2227 switch (code)
2228 {
2229 case REG:
1cd4cfea 2230 case CONST_INT:
2231 case CONST_DOUBLE:
2232 case CONST_VECTOR:
2233 case SYMBOL_REF:
2234 case LABEL_REF:
2235 case CODE_LABEL:
2236 case PC:
2237 case CC0:
 2238     case SCRATCH:
c09425a0 2239       /* SCRATCH rtxs must be shared because they represent distinct values.  */
1cd4cfea 2240       return;
 2241     case CLOBBER:
2242 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2243 return;
2244 break;
1cd4cfea 2245
2246 case CONST:
2247 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2248 a LABEL_REF, it isn't sharable. */
2249 if (GET_CODE (XEXP (x, 0)) == PLUS
2250 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2251 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2252 return;
2253 break;
2254
2255 case MEM:
2256 /* A MEM is allowed to be shared if its address is constant. */
2257 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2258 || reload_completed || reload_in_progress)
2259 return;
2260
2261 break;
2262
2263 default:
2264 break;
2265 }
2266
2267 /* This rtx may not be shared. If it has already been seen,
2268 replace it with a copy of itself. */
2269
2270 if (RTX_FLAG (x, used))
2271 {
2272 error ("Invalid rtl sharing found in the insn");
2273 debug_rtx (insn);
2274 error ("Shared rtx");
2275 debug_rtx (x);
2276 abort ();
2277 }
2278 RTX_FLAG (x, used) = 1;
2279
8b332087 2280 /* Now scan the subexpressions recursively. */
1cd4cfea 2281
2282 format_ptr = GET_RTX_FORMAT (code);
2283
2284 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2285 {
2286 switch (*format_ptr++)
2287 {
2288 case 'e':
2289 verify_rtx_sharing (XEXP (x, i), insn);
2290 break;
2291
2292 case 'E':
2293 if (XVEC (x, i) != NULL)
2294 {
2295 int j;
2296 int len = XVECLEN (x, i);
2297
2298 for (j = 0; j < len; j++)
2299 {
 2300 	      /* We allow sharing of ASM_OPERANDS inside a single instruction.  */
2301 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2302 && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
2303 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2304 else
2305 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2306 }
2307 }
2308 break;
2309 }
2310 }
2311 return;
2312}
2313
c7bf1374 2314/* Go through all the RTL insn bodies and check that there is no unexpected
1cd4cfea 2315   sharing between the subexpressions.  */
2316
2317void
2318verify_rtl_sharing (void)
2319{
2320 rtx p;
2321
2322 for (p = get_insns (); p; p = NEXT_INSN (p))
2323 if (INSN_P (p))
2324 {
2325 reset_used_flags (PATTERN (p));
2326 reset_used_flags (REG_NOTES (p));
2327 reset_used_flags (LOG_LINKS (p));
2328 }
2329
2330 for (p = get_insns (); p; p = NEXT_INSN (p))
2331 if (INSN_P (p))
2332 {
2333 verify_rtx_sharing (PATTERN (p), p);
2334 verify_rtx_sharing (REG_NOTES (p), p);
2335 verify_rtx_sharing (LOG_LINKS (p), p);
2336 }
2337}
2338
2d96a59a 2339/* Go through all the RTL insn bodies and copy any invalid shared structure.
2340 Assumes the mark bits are cleared at entry. */
2341
1cd4cfea 2342void
2343unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2344{
2345 for (; insn; insn = NEXT_INSN (insn))
9204e736 2346 if (INSN_P (insn))
2d96a59a 2347 {
2348 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2349 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2350 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2351 }
2352}
2353
72ec8878 2354/* Go through all virtual stack slots of a function and copy any
2355 shared structure. */
2356static void
35cb5232 2357unshare_all_decls (tree blk)
72ec8878 2358{
2359 tree t;
2360
2361 /* Copy shared decls. */
2362 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 2363 if (DECL_RTL_SET_P (t))
2364 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
72ec8878 2365
2366 /* Now process sub-blocks. */
2367 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2368 unshare_all_decls (t);
2369}
2370
01dc9f0c 2371/* Go through all virtual stack slots of a function and mark them as
6312a35e 2372 not shared. */
01dc9f0c 2373static void
35cb5232 2374reset_used_decls (tree blk)
01dc9f0c 2375{
2376 tree t;
2377
2378 /* Mark decls. */
2379 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 2380 if (DECL_RTL_SET_P (t))
2381 reset_used_flags (DECL_RTL (t));
01dc9f0c 2382
2383 /* Now process sub-blocks. */
2384 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2385 reset_used_decls (t);
2386}
2387
c15aa775 2388/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
dd367d39 2389 placed in the result directly, rather than being copied. MAY_SHARE is
 2390   either a MEM or an EXPR_LIST of MEMs.  */
c15aa775 2391
2392rtx
35cb5232 2393copy_most_rtx (rtx orig, rtx may_share)
c15aa775 2394{
2395 rtx copy;
2396 int i, j;
2397 RTX_CODE code;
2398 const char *format_ptr;
2399
dd367d39 2400 if (orig == may_share
2401 || (GET_CODE (may_share) == EXPR_LIST
2402 && in_expr_list_p (may_share, orig)))
c15aa775 2403 return orig;
2404
2405 code = GET_CODE (orig);
2406
2407 switch (code)
2408 {
2409 case REG:
c15aa775 2410 case CONST_INT:
2411 case CONST_DOUBLE:
2412 case CONST_VECTOR:
2413 case SYMBOL_REF:
2414 case CODE_LABEL:
2415 case PC:
2416 case CC0:
2417 return orig;
2418 default:
2419 break;
2420 }
2421
2422 copy = rtx_alloc (code);
2423 PUT_MODE (copy, GET_MODE (orig));
7c25cb91 2424 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2425 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2426 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
7c25cb91 2427 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
4ee9c684 2428 RTX_FLAG (copy, return_val) = RTX_FLAG (orig, return_val);
c15aa775 2429
2430 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2431
2432 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2433 {
2434 switch (*format_ptr++)
2435 {
2436 case 'e':
2437 XEXP (copy, i) = XEXP (orig, i);
2438 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2439 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2440 break;
2441
2442 case 'u':
2443 XEXP (copy, i) = XEXP (orig, i);
2444 break;
2445
2446 case 'E':
2447 case 'V':
2448 XVEC (copy, i) = XVEC (orig, i);
2449 if (XVEC (orig, i) != NULL)
2450 {
2451 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2452 for (j = 0; j < XVECLEN (copy, i); j++)
2453 XVECEXP (copy, i, j)
2454 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2455 }
2456 break;
2457
2458 case 'w':
2459 XWINT (copy, i) = XWINT (orig, i);
2460 break;
2461
2462 case 'n':
2463 case 'i':
2464 XINT (copy, i) = XINT (orig, i);
2465 break;
2466
2467 case 't':
2468 XTREE (copy, i) = XTREE (orig, i);
2469 break;
2470
2471 case 's':
2472 case 'S':
2473 XSTR (copy, i) = XSTR (orig, i);
2474 break;
2475
2476 case '0':
bf6b5685 2477 X0ANY (copy, i) = X0ANY (orig, i);
c15aa775 2478 break;
2479
2480 default:
2481 abort ();
2482 }
2483 }
2484 return copy;
2485}
2486
15bbde2b 2487/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2488 Recursively does the same for subexpressions. Uses
2489 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2490
2491rtx
35cb5232 2492copy_rtx_if_shared (rtx orig)
15bbde2b 2493{
0e0727c4 2494 copy_rtx_if_shared_1 (&orig);
2495 return orig;
2496}
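
/* Illustrative sketch of the marking protocol: the used bits must be
   cleared over every rtx of interest before copy_rtx_if_shared is called,
   e.g. for two hypothetical expressions X and Y:

       reset_used_flags (x);
       reset_used_flags (y);
       x = copy_rtx_if_shared (x);
       y = copy_rtx_if_shared (y);

   so any structure reachable from both is copied on the second pass.  */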
2497
7ba6ce7a 2498/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2499 use. Recursively does the same for subexpressions. */
2500
0e0727c4 2501static void
2502copy_rtx_if_shared_1 (rtx *orig1)
2503{
2504 rtx x;
19cb6b50 2505 int i;
2506 enum rtx_code code;
0e0727c4 2507 rtx *last_ptr;
19cb6b50 2508 const char *format_ptr;
15bbde2b 2509 int copied = 0;
0e0727c4 2510 int length;
2511
2512 /* Repeat is used to turn tail-recursion into iteration. */
2513repeat:
2514 x = *orig1;
15bbde2b 2515
2516 if (x == 0)
0e0727c4 2517 return;
15bbde2b 2518
2519 code = GET_CODE (x);
2520
2521 /* These types may be freely shared. */
2522
2523 switch (code)
2524 {
2525 case REG:
15bbde2b 2526 case CONST_INT:
2527 case CONST_DOUBLE:
886cfd4f 2528 case CONST_VECTOR:
15bbde2b 2529 case SYMBOL_REF:
1cd4cfea 2530 case LABEL_REF:
15bbde2b 2531 case CODE_LABEL:
2532 case PC:
2533 case CC0:
2534 case SCRATCH:
a92771b8 2535      /* SCRATCH rtxs must be shared because they represent distinct values.  */
0e0727c4 2536 return;
c09425a0 2537 case CLOBBER:
2538 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2539 return;
2540 break;
15bbde2b 2541
f63d12e3 2542 case CONST:
2543 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2544 a LABEL_REF, it isn't sharable. */
2545 if (GET_CODE (XEXP (x, 0)) == PLUS
2546 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2547 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
0e0727c4 2548 return;
f63d12e3 2549 break;
2550
15bbde2b 2551 case INSN:
2552 case JUMP_INSN:
2553 case CALL_INSN:
2554 case NOTE:
15bbde2b 2555 case BARRIER:
2556 /* The chain of insns is not being copied. */
0e0727c4 2557 return;
15bbde2b 2558
0dbd1c74 2559 default:
2560 break;
15bbde2b 2561 }
2562
2563 /* This rtx may not be shared. If it has already been seen,
2564 replace it with a copy of itself. */
2565
7c25cb91 2566 if (RTX_FLAG (x, used))
15bbde2b 2567 {
19cb6b50 2568 rtx copy;
15bbde2b 2569
2570 copy = rtx_alloc (code);
bf6b5685 2571 memcpy (copy, x, RTX_SIZE (code));
15bbde2b 2572 x = copy;
2573 copied = 1;
2574 }
7c25cb91 2575 RTX_FLAG (x, used) = 1;
15bbde2b 2576
2577 /* Now scan the subexpressions recursively.
2578 We can store any replaced subexpressions directly into X
2579 since we know X is not shared! Any vectors in X
2580 must be copied if X was copied. */
2581
2582 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2583 length = GET_RTX_LENGTH (code);
2584 last_ptr = NULL;
2585
2586 for (i = 0; i < length; i++)
15bbde2b 2587 {
2588 switch (*format_ptr++)
2589 {
2590 case 'e':
0e0727c4 2591 if (last_ptr)
2592 copy_rtx_if_shared_1 (last_ptr);
2593 last_ptr = &XEXP (x, i);
15bbde2b 2594 break;
2595
2596 case 'E':
2597 if (XVEC (x, i) != NULL)
2598 {
19cb6b50 2599 int j;
ffe0869b 2600 int len = XVECLEN (x, i);
0e0727c4 2601
8b332087 2602	    /* Copy the vector iff we copied the rtx and the length
2603 is nonzero. */
ffe0869b 2604 if (copied && len > 0)
a4070a91 2605 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
0e0727c4 2606
d632b59a 2607	    /* Recurse on everything inside the vector.  */
ffe0869b 2608 for (j = 0; j < len; j++)
0e0727c4 2609 {
2610 if (last_ptr)
2611 copy_rtx_if_shared_1 (last_ptr);
2612 last_ptr = &XVECEXP (x, i, j);
2613 }
15bbde2b 2614 }
2615 break;
2616 }
2617 }
0e0727c4 2618 *orig1 = x;
2619 if (last_ptr)
2620 {
2621 orig1 = last_ptr;
2622 goto repeat;
2623 }
2624 return;
15bbde2b 2625}
2626
2627/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2628 to look for shared sub-parts. */
2629
2630void
35cb5232 2631reset_used_flags (rtx x)
15bbde2b 2632{
19cb6b50 2633 int i, j;
2634 enum rtx_code code;
2635 const char *format_ptr;
0e0727c4 2636 int length;
15bbde2b 2637
0e0727c4 2638 /* Repeat is used to turn tail-recursion into iteration. */
2639repeat:
15bbde2b 2640 if (x == 0)
2641 return;
2642
2643 code = GET_CODE (x);
2644
c3418f42 2645 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2646 for them. */
2647
2648 switch (code)
2649 {
2650 case REG:
15bbde2b 2651 case CONST_INT:
2652 case CONST_DOUBLE:
886cfd4f 2653 case CONST_VECTOR:
15bbde2b 2654 case SYMBOL_REF:
2655 case CODE_LABEL:
2656 case PC:
2657 case CC0:
2658 return;
2659
2660 case INSN:
2661 case JUMP_INSN:
2662 case CALL_INSN:
2663 case NOTE:
2664 case LABEL_REF:
2665 case BARRIER:
2666 /* The chain of insns is not being copied. */
2667 return;
d823ba47 2668
0dbd1c74 2669 default:
2670 break;
15bbde2b 2671 }
2672
7c25cb91 2673 RTX_FLAG (x, used) = 0;
15bbde2b 2674
2675 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2676 length = GET_RTX_LENGTH (code);
2677
2678 for (i = 0; i < length; i++)
15bbde2b 2679 {
2680 switch (*format_ptr++)
2681 {
2682 case 'e':
0e0727c4 2683 if (i == length-1)
2684 {
2685 x = XEXP (x, i);
2686 goto repeat;
2687 }
15bbde2b 2688 reset_used_flags (XEXP (x, i));
2689 break;
2690
2691 case 'E':
2692 for (j = 0; j < XVECLEN (x, i); j++)
2693 reset_used_flags (XVECEXP (x, i, j));
2694 break;
2695 }
2696 }
2697}
1cd4cfea 2698
2699/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2700 to look for shared sub-parts. */
2701
2702void
2703set_used_flags (rtx x)
2704{
2705 int i, j;
2706 enum rtx_code code;
2707 const char *format_ptr;
2708
2709 if (x == 0)
2710 return;
2711
2712 code = GET_CODE (x);
2713
2714 /* These types may be freely shared so we needn't do any resetting
2715 for them. */
2716
2717 switch (code)
2718 {
2719 case REG:
1cd4cfea 2720 case CONST_INT:
2721 case CONST_DOUBLE:
2722 case CONST_VECTOR:
2723 case SYMBOL_REF:
2724 case CODE_LABEL:
2725 case PC:
2726 case CC0:
2727 return;
2728
2729 case INSN:
2730 case JUMP_INSN:
2731 case CALL_INSN:
2732 case NOTE:
2733 case LABEL_REF:
2734 case BARRIER:
2735 /* The chain of insns is not being copied. */
2736 return;
2737
2738 default:
2739 break;
2740 }
2741
2742 RTX_FLAG (x, used) = 1;
2743
2744 format_ptr = GET_RTX_FORMAT (code);
2745 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2746 {
2747 switch (*format_ptr++)
2748 {
2749 case 'e':
2750 set_used_flags (XEXP (x, i));
2751 break;
2752
2753 case 'E':
2754 for (j = 0; j < XVECLEN (x, i); j++)
2755 set_used_flags (XVECEXP (x, i, j));
2756 break;
2757 }
2758 }
2759}
15bbde2b 2760\f
2761/* Copy X if necessary so that it won't be altered by changes in OTHER.
2762 Return X or the rtx for the pseudo reg the value of X was copied into.
2763 OTHER must be valid as a SET_DEST. */
2764
2765rtx
35cb5232 2766make_safe_from (rtx x, rtx other)
15bbde2b 2767{
2768 while (1)
2769 switch (GET_CODE (other))
2770 {
2771 case SUBREG:
2772 other = SUBREG_REG (other);
2773 break;
2774 case STRICT_LOW_PART:
2775 case SIGN_EXTEND:
2776 case ZERO_EXTEND:
2777 other = XEXP (other, 0);
2778 break;
2779 default:
2780 goto done;
2781 }
2782 done:
e16ceb8e 2783 if ((MEM_P (other)
15bbde2b 2784 && ! CONSTANT_P (x)
8ad4c111 2785 && !REG_P (x)
15bbde2b 2786 && GET_CODE (x) != SUBREG)
8ad4c111 2787 || (REG_P (other)
15bbde2b 2788 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2789 || reg_mentioned_p (other, x))))
2790 {
2791 rtx temp = gen_reg_rtx (GET_MODE (x));
2792 emit_move_insn (temp, x);
2793 return temp;
2794 }
2795 return x;
2796}
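
/* Illustrative sketch: an expander that still needs X after storing into
   TARGET (both hypothetical) can protect it first:

       x = make_safe_from (x, target);
       emit_move_insn (target, value);

   If writing TARGET could alter X, X is now a fresh pseudo holding the
   old value.  */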
2797\f
2798/* Emission of insns (adding them to the doubly-linked list). */
2799
2800/* Return the first insn of the current sequence or current function. */
2801
2802rtx
35cb5232 2803get_insns (void)
15bbde2b 2804{
2805 return first_insn;
2806}
2807
fb20d6fa 2808/* Specify a new insn as the first in the chain. */
2809
2810void
35cb5232 2811set_first_insn (rtx insn)
fb20d6fa 2812{
2813 if (PREV_INSN (insn) != 0)
2814 abort ();
2815 first_insn = insn;
2816}
2817
15bbde2b 2818/* Return the last insn emitted in current sequence or current function. */
2819
2820rtx
35cb5232 2821get_last_insn (void)
15bbde2b 2822{
2823 return last_insn;
2824}
2825
2826/* Specify a new insn as the last in the chain. */
2827
2828void
35cb5232 2829set_last_insn (rtx insn)
15bbde2b 2830{
2831 if (NEXT_INSN (insn) != 0)
2832 abort ();
2833 last_insn = insn;
2834}
2835
2836/* Return the last insn emitted, even if it is in a sequence now pushed. */
2837
2838rtx
35cb5232 2839get_last_insn_anywhere (void)
15bbde2b 2840{
2841 struct sequence_stack *stack;
2842 if (last_insn)
2843 return last_insn;
0a893c29 2844 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2845 if (stack->last != 0)
2846 return stack->last;
2847 return 0;
2848}
2849
70545de4 2850/* Return the first nonnote insn emitted in current sequence or current
2851 function. This routine looks inside SEQUENCEs. */
2852
2853rtx
35cb5232 2854get_first_nonnote_insn (void)
70545de4 2855{
2856 rtx insn = first_insn;
2857
2858 while (insn)
2859 {
2860 insn = next_insn (insn);
6d7dc5b9 2861 if (insn == 0 || !NOTE_P (insn))
70545de4 2862 break;
2863 }
2864
2865 return insn;
2866}
2867
2868/* Return the last nonnote insn emitted in current sequence or current
2869 function. This routine looks inside SEQUENCEs. */
2870
2871rtx
35cb5232 2872get_last_nonnote_insn (void)
70545de4 2873{
2874 rtx insn = last_insn;
2875
2876 while (insn)
2877 {
2878 insn = previous_insn (insn);
6d7dc5b9 2879 if (insn == 0 || !NOTE_P (insn))
70545de4 2880 break;
2881 }
2882
2883 return insn;
2884}
2885
15bbde2b 2886/* Return a number larger than any instruction's uid in this function. */
2887
2888int
35cb5232 2889get_max_uid (void)
15bbde2b 2890{
2891 return cur_insn_uid;
2892}
90b89d2c 2893
214d02d0 2894/* Renumber instructions so that no instruction UIDs are wasted. */
2895
90b89d2c 2896void
35cb5232 2897renumber_insns (FILE *stream)
90b89d2c 2898{
2899 rtx insn;
90b89d2c 2900
214d02d0 2901 /* If we're not supposed to renumber instructions, don't. */
2902 if (!flag_renumber_insns)
2903 return;
2904
90b89d2c 2905 /* If there aren't that many instructions, then it's not really
2906 worth renumbering them. */
214d02d0 2907 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
90b89d2c 2908 return;
2909
2910 cur_insn_uid = 1;
2911
2912 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
214d02d0 2913 {
2914 if (stream)
d823ba47 2915 fprintf (stream, "Renumbering insn %d to %d\n",
214d02d0 2916 INSN_UID (insn), cur_insn_uid);
2917 INSN_UID (insn) = cur_insn_uid++;
2918 }
90b89d2c 2919}
15bbde2b 2920\f
2921/* Return the next insn. If it is a SEQUENCE, return the first insn
2922 of the sequence. */
2923
2924rtx
35cb5232 2925next_insn (rtx insn)
15bbde2b 2926{
2927 if (insn)
2928 {
2929 insn = NEXT_INSN (insn);
6d7dc5b9 2930 if (insn && NONJUMP_INSN_P (insn)
15bbde2b 2931 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2932 insn = XVECEXP (PATTERN (insn), 0, 0);
2933 }
2934
2935 return insn;
2936}
2937
2938/* Return the previous insn. If it is a SEQUENCE, return the last insn
2939 of the sequence. */
2940
2941rtx
35cb5232 2942previous_insn (rtx insn)
15bbde2b 2943{
2944 if (insn)
2945 {
2946 insn = PREV_INSN (insn);
6d7dc5b9 2947 if (insn && NONJUMP_INSN_P (insn)
15bbde2b 2948 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2949 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2950 }
2951
2952 return insn;
2953}
2954
2955/* Return the next insn after INSN that is not a NOTE. This routine does not
2956 look inside SEQUENCEs. */
2957
2958rtx
35cb5232 2959next_nonnote_insn (rtx insn)
15bbde2b 2960{
2961 while (insn)
2962 {
2963 insn = NEXT_INSN (insn);
6d7dc5b9 2964 if (insn == 0 || !NOTE_P (insn))
15bbde2b 2965 break;
2966 }
2967
2968 return insn;
2969}
2970
2971/* Return the previous insn before INSN that is not a NOTE. This routine does
2972 not look inside SEQUENCEs. */
2973
2974rtx
35cb5232 2975prev_nonnote_insn (rtx insn)
15bbde2b 2976{
2977 while (insn)
2978 {
2979 insn = PREV_INSN (insn);
6d7dc5b9 2980 if (insn == 0 || !NOTE_P (insn))
15bbde2b 2981 break;
2982 }
2983
2984 return insn;
2985}
2986
2987/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2988 or 0, if there is none. This routine does not look inside
a92771b8 2989 SEQUENCEs. */
15bbde2b 2990
2991rtx
35cb5232 2992next_real_insn (rtx insn)
15bbde2b 2993{
2994 while (insn)
2995 {
2996 insn = NEXT_INSN (insn);
e086b176 2997 if (insn == 0 || INSN_P (insn))
15bbde2b 2998 break;
2999 }
3000
3001 return insn;
3002}
3003
3004/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3005 or 0, if there is none. This routine does not look inside
3006 SEQUENCEs. */
3007
3008rtx
35cb5232 3009prev_real_insn (rtx insn)
15bbde2b 3010{
3011 while (insn)
3012 {
3013 insn = PREV_INSN (insn);
e086b176 3014 if (insn == 0 || INSN_P (insn))
15bbde2b 3015 break;
3016 }
3017
3018 return insn;
3019}
3020
d5f9786f 3021/* Return the last CALL_INSN in the current list, or 0 if there is none.
3022 This routine does not look inside SEQUENCEs. */
3023
3024rtx
35cb5232 3025last_call_insn (void)
d5f9786f 3026{
3027 rtx insn;
3028
3029 for (insn = get_last_insn ();
6d7dc5b9 3030 insn && !CALL_P (insn);
d5f9786f 3031 insn = PREV_INSN (insn))
3032 ;
3033
3034 return insn;
3035}
3036
15bbde2b 3037/* Find the next insn after INSN that really does something. This routine
3038 does not look inside SEQUENCEs. Until reload has completed, this is the
3039 same as next_real_insn. */
3040
2215ca0d 3041int
35cb5232 3042active_insn_p (rtx insn)
2215ca0d 3043{
6d7dc5b9 3044 return (CALL_P (insn) || JUMP_P (insn)
3045 || (NONJUMP_INSN_P (insn)
3a66feab 3046 && (! reload_completed
3047 || (GET_CODE (PATTERN (insn)) != USE
3048 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3049}
3050
15bbde2b 3051rtx
35cb5232 3052next_active_insn (rtx insn)
15bbde2b 3053{
3054 while (insn)
3055 {
3056 insn = NEXT_INSN (insn);
2215ca0d 3057 if (insn == 0 || active_insn_p (insn))
15bbde2b 3058 break;
3059 }
3060
3061 return insn;
3062}
3063
3064/* Find the last insn before INSN that really does something. This routine
3065 does not look inside SEQUENCEs. Until reload has completed, this is the
3066 same as prev_real_insn. */
3067
3068rtx
35cb5232 3069prev_active_insn (rtx insn)
15bbde2b 3070{
3071 while (insn)
3072 {
3073 insn = PREV_INSN (insn);
2215ca0d 3074 if (insn == 0 || active_insn_p (insn))
15bbde2b 3075 break;
3076 }
3077
3078 return insn;
3079}
3080
3081/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3082
3083rtx
35cb5232 3084next_label (rtx insn)
15bbde2b 3085{
3086 while (insn)
3087 {
3088 insn = NEXT_INSN (insn);
6d7dc5b9 3089 if (insn == 0 || LABEL_P (insn))
15bbde2b 3090 break;
3091 }
3092
3093 return insn;
3094}
3095
3096/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3097
3098rtx
35cb5232 3099prev_label (rtx insn)
15bbde2b 3100{
3101 while (insn)
3102 {
3103 insn = PREV_INSN (insn);
6d7dc5b9 3104 if (insn == 0 || LABEL_P (insn))
15bbde2b 3105 break;
3106 }
3107
3108 return insn;
3109}
67c5e2a9 3110
3111/* Return the last label to mark the same position as LABEL. Return null
3112 if LABEL itself is null. */
3113
3114rtx
3115skip_consecutive_labels (rtx label)
3116{
3117 rtx insn;
3118
3119 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3120 if (LABEL_P (insn))
3121 label = insn;
3122
3123 return label;
3124}
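
/* Illustrative sketch: the walkers above compose into simple whole-function
   scans, for example counting the CALL_INSNs in the current function:

       int n = 0;
       rtx insn;

       for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
         if (CALL_P (insn))
           n++;
 */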
15bbde2b 3125\f
3126#ifdef HAVE_cc0
b15e0bba 3127/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3128 and REG_CC_USER notes so we can find it. */
3129
3130void
35cb5232 3131link_cc0_insns (rtx insn)
b15e0bba 3132{
3133 rtx user = next_nonnote_insn (insn);
3134
6d7dc5b9 3135 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
b15e0bba 3136 user = XVECEXP (PATTERN (user), 0, 0);
3137
7014838c 3138 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3139 REG_NOTES (user));
3ad7bb1c 3140 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
b15e0bba 3141}
3142
15bbde2b 3143/* Return the next insn that uses CC0 after INSN, which is assumed to
3144 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3145 applied to the result of this function should yield INSN).
3146
3147 Normally, this is simply the next insn. However, if a REG_CC_USER note
3148 is present, it contains the insn that uses CC0.
3149
3150 Return 0 if we can't find the insn. */
3151
3152rtx
35cb5232 3153next_cc0_user (rtx insn)
15bbde2b 3154{
b572011e 3155 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3156
3157 if (note)
3158 return XEXP (note, 0);
3159
3160 insn = next_nonnote_insn (insn);
6d7dc5b9 3161 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3162 insn = XVECEXP (PATTERN (insn), 0, 0);
3163
9204e736 3164 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3165 return insn;
3166
3167 return 0;
3168}
3169
3170/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3171 note, it is the previous insn. */
3172
3173rtx
35cb5232 3174prev_cc0_setter (rtx insn)
15bbde2b 3175{
b572011e 3176 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3177
3178 if (note)
3179 return XEXP (note, 0);
3180
3181 insn = prev_nonnote_insn (insn);
3182 if (! sets_cc0_p (PATTERN (insn)))
3183 abort ();
3184
3185 return insn;
3186}
3187#endif
344dc2fa 3188
 3189/* Increment the label uses for all labels present in X.  */
3190
3191static void
35cb5232 3192mark_label_nuses (rtx x)
344dc2fa 3193{
19cb6b50 3194 enum rtx_code code;
3195 int i, j;
3196 const char *fmt;
344dc2fa 3197
3198 code = GET_CODE (x);
a030d4a8 3199 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3200 LABEL_NUSES (XEXP (x, 0))++;
3201
3202 fmt = GET_RTX_FORMAT (code);
3203 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3204 {
3205 if (fmt[i] == 'e')
ff385626 3206 mark_label_nuses (XEXP (x, i));
344dc2fa 3207 else if (fmt[i] == 'E')
ff385626 3208 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3209 mark_label_nuses (XVECEXP (x, i, j));
3210 }
3211}
3212
15bbde2b 3213\f
3214/* Try splitting insns that can be split for better scheduling.
3215 PAT is the pattern which might split.
3216 TRIAL is the insn providing PAT.
6ef828f9 3217 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3218
3219 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3220 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3221 returns TRIAL. If the insn to be returned can be split, it will be. */
3222
3223rtx
35cb5232 3224try_split (rtx pat, rtx trial, int last)
15bbde2b 3225{
3226 rtx before = PREV_INSN (trial);
3227 rtx after = NEXT_INSN (trial);
15bbde2b 3228 int has_barrier = 0;
3229 rtx tem;
3cd757b1 3230 rtx note, seq;
3231 int probability;
e13693ec 3232 rtx insn_last, insn;
3233 int njumps = 0;
3cd757b1 3234
3235 if (any_condjump_p (trial)
3236 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3237 split_branch_probability = INTVAL (XEXP (note, 0));
3238 probability = split_branch_probability;
3239
3240 seq = split_insns (pat, trial);
3241
3242 split_branch_probability = -1;
15bbde2b 3243
3244 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3245 We may need to handle this specially. */
6d7dc5b9 3246 if (after && BARRIER_P (after))
15bbde2b 3247 {
3248 has_barrier = 1;
3249 after = NEXT_INSN (after);
3250 }
3251
e13693ec 3252 if (!seq)
3253 return trial;
3254
3255 /* Avoid infinite loop if any insn of the result matches
3256 the original pattern. */
3257 insn_last = seq;
3258 while (1)
15bbde2b 3259 {
e13693ec 3260 if (INSN_P (insn_last)
3261 && rtx_equal_p (PATTERN (insn_last), pat))
3262 return trial;
3263 if (!NEXT_INSN (insn_last))
3264 break;
3265 insn_last = NEXT_INSN (insn_last);
3266 }
d823ba47 3267
e13693ec 3268 /* Mark labels. */
3269 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3270 {
6d7dc5b9 3271 if (JUMP_P (insn))
e13693ec 3272 {
3273 mark_jump_label (PATTERN (insn), insn, 0);
3274 njumps++;
3275 if (probability != -1
3276 && any_condjump_p (insn)
3277 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3278 {
e13693ec 3279 /* We can preserve the REG_BR_PROB notes only if exactly
3280 one jump is created, otherwise the machine description
3281 is responsible for this step using
3282 split_branch_probability variable. */
3283 if (njumps != 1)
3284 abort ();
3285 REG_NOTES (insn)
3286 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3287 GEN_INT (probability),
3288 REG_NOTES (insn));
31d3e01c 3289 }
e13693ec 3290 }
3291 }
3292
3293 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3294 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
6d7dc5b9 3295 if (CALL_P (trial))
e13693ec 3296 {
3297 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3298 if (CALL_P (insn))
e13693ec 3299 {
0bb5a6cd 3300 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3301 while (*p)
3302 p = &XEXP (*p, 1);
3303 *p = CALL_INSN_FUNCTION_USAGE (trial);
e13693ec 3304 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3305 }
3306 }
5262c253 3307
e13693ec 3308 /* Copy notes, particularly those related to the CFG. */
3309 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3310 {
3311 switch (REG_NOTE_KIND (note))
3312 {
3313 case REG_EH_REGION:
31d3e01c 3314 insn = insn_last;
3315 while (insn != NULL_RTX)
3316 {
6d7dc5b9 3317 if (CALL_P (insn)
e13693ec 3318 || (flag_non_call_exceptions
3319 && may_trap_p (PATTERN (insn))))
3320 REG_NOTES (insn)
3321 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3322 XEXP (note, 0),
3323 REG_NOTES (insn));
31d3e01c 3324 insn = PREV_INSN (insn);
3325 }
e13693ec 3326 break;
381eb1e7 3327
e13693ec 3328 case REG_NORETURN:
3329 case REG_SETJMP:
3330 case REG_ALWAYS_RETURN:
3331 insn = insn_last;
3332 while (insn != NULL_RTX)
381eb1e7 3333 {
6d7dc5b9 3334 if (CALL_P (insn))
e13693ec 3335 REG_NOTES (insn)
3336 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3337 XEXP (note, 0),
3338 REG_NOTES (insn));
3339 insn = PREV_INSN (insn);
381eb1e7 3340 }
e13693ec 3341 break;
5bb27a4b 3342
e13693ec 3343 case REG_NON_LOCAL_GOTO:
3344 insn = insn_last;
3345 while (insn != NULL_RTX)
31d3e01c 3346 {
6d7dc5b9 3347 if (JUMP_P (insn))
e13693ec 3348 REG_NOTES (insn)
3349 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3350 XEXP (note, 0),
3351 REG_NOTES (insn));
3352 insn = PREV_INSN (insn);
31d3e01c 3353 }
e13693ec 3354 break;
344dc2fa 3355
e13693ec 3356 default:
3357 break;
15bbde2b 3358 }
e13693ec 3359 }
3360
3361 /* If there are LABELS inside the split insns increment the
3362 usage count so we don't delete the label. */
6d7dc5b9 3363 if (NONJUMP_INSN_P (trial))
e13693ec 3364 {
3365 insn = insn_last;
3366 while (insn != NULL_RTX)
15bbde2b 3367 {
6d7dc5b9 3368 if (NONJUMP_INSN_P (insn))
e13693ec 3369 mark_label_nuses (PATTERN (insn));
15bbde2b 3370
e13693ec 3371 insn = PREV_INSN (insn);
3372 }
15bbde2b 3373 }
3374
13751393 3375 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
e13693ec 3376
3377 delete_insn (trial);
3378 if (has_barrier)
3379 emit_barrier_after (tem);
3380
3381 /* Recursively call try_split for each new insn created; by the
3382 time control returns here that insn will be fully split, so
3383 set LAST and continue from the insn after the one returned.
3384 We can't use next_active_insn here since AFTER may be a note.
 3385     Ignore deleted insns, which can occur if not optimizing.  */
3386 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3387 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3388 tem = try_split (PATTERN (tem), tem, 1);
3389
3390 /* Return either the first or the last insn, depending on which was
3391 requested. */
3392 return last
3393 ? (after ? PREV_INSN (after) : last_insn)
3394 : NEXT_INSN (before);
15bbde2b 3395}
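
/* Illustrative sketch: a pass that wants INSN split as finely as the
   machine description allows can simply do

       last = try_split (PATTERN (insn), insn, 1);

   and resume scanning at NEXT_INSN (last); if no splitter matched, LAST
   is just INSN again.  */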
3396\f
3397/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3398   Store PATTERN in the pattern slot.  */
15bbde2b 3399
3400rtx
35cb5232 3401make_insn_raw (rtx pattern)
15bbde2b 3402{
19cb6b50 3403 rtx insn;
15bbde2b 3404
d7c47c0e 3405 insn = rtx_alloc (INSN);
15bbde2b 3406
575333f9 3407 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3408 PATTERN (insn) = pattern;
3409 INSN_CODE (insn) = -1;
fc92fa61 3410 LOG_LINKS (insn) = NULL;
3411 REG_NOTES (insn) = NULL;
13751393 3412 INSN_LOCATOR (insn) = 0;
ab87d1bc 3413 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3414
fe7f701d 3415#ifdef ENABLE_RTL_CHECKING
3416 if (insn
9204e736 3417 && INSN_P (insn)
fe7f701d 3418 && (returnjump_p (insn)
3419 || (GET_CODE (insn) == SET
3420 && SET_DEST (insn) == pc_rtx)))
3421 {
3422 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3423 debug_rtx (insn);
3424 }
3425#endif
d823ba47 3426
15bbde2b 3427 return insn;
3428}
3429
31d3e01c 3430/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3431
3432static rtx
35cb5232 3433make_jump_insn_raw (rtx pattern)
15bbde2b 3434{
19cb6b50 3435 rtx insn;
15bbde2b 3436
6a84e367 3437 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3438 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3439
3440 PATTERN (insn) = pattern;
3441 INSN_CODE (insn) = -1;
fc92fa61 3442 LOG_LINKS (insn) = NULL;
3443 REG_NOTES (insn) = NULL;
3444 JUMP_LABEL (insn) = NULL;
13751393 3445 INSN_LOCATOR (insn) = 0;
ab87d1bc 3446 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3447
3448 return insn;
3449}
6e911104 3450
31d3e01c 3451/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3452
3453static rtx
35cb5232 3454make_call_insn_raw (rtx pattern)
6e911104 3455{
19cb6b50 3456 rtx insn;
6e911104 3457
3458 insn = rtx_alloc (CALL_INSN);
3459 INSN_UID (insn) = cur_insn_uid++;
3460
3461 PATTERN (insn) = pattern;
3462 INSN_CODE (insn) = -1;
3463 LOG_LINKS (insn) = NULL;
3464 REG_NOTES (insn) = NULL;
3465 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
13751393 3466 INSN_LOCATOR (insn) = 0;
ab87d1bc 3467 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3468
3469 return insn;
3470}
15bbde2b 3471\f
3472/* Add INSN to the end of the doubly-linked list.
3473 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3474
3475void
35cb5232 3476add_insn (rtx insn)
15bbde2b 3477{
3478 PREV_INSN (insn) = last_insn;
3479 NEXT_INSN (insn) = 0;
3480
3481 if (NULL != last_insn)
3482 NEXT_INSN (last_insn) = insn;
3483
3484 if (NULL == first_insn)
3485 first_insn = insn;
3486
3487 last_insn = insn;
3488}
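
/* Illustrative sketch: the emit_insn family ultimately funnels through
   these primitives; a minimal equivalent of emitting a hypothetical
   pattern PAT at the end of the chain is

       rtx insn = make_insn_raw (pat);
       add_insn (insn);
 */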
3489
312de84d 3490/* Add INSN into the doubly-linked list after insn AFTER. This and
3491 the next should be the only functions called to insert an insn once
f65c10c0 3492 delay slots have been filled since only they know how to update a
312de84d 3493 SEQUENCE. */
15bbde2b 3494
3495void
35cb5232 3496add_insn_after (rtx insn, rtx after)
15bbde2b 3497{
3498 rtx next = NEXT_INSN (after);
9dda7915 3499 basic_block bb;
15bbde2b 3500
9ea33026 3501 if (optimize && INSN_DELETED_P (after))
f65c10c0 3502 abort ();
3503
15bbde2b 3504 NEXT_INSN (insn) = next;
3505 PREV_INSN (insn) = after;
3506
3507 if (next)
3508 {
3509 PREV_INSN (next) = insn;
6d7dc5b9 3510 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
15bbde2b 3511 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3512 }
3513 else if (last_insn == after)
3514 last_insn = insn;
3515 else
3516 {
0a893c29 3517 struct sequence_stack *stack = seq_stack;
15bbde2b 3518 /* Scan all pending sequences too. */
3519 for (; stack; stack = stack->next)
3520 if (after == stack->last)
398f4855 3521 {
3522 stack->last = insn;
3523 break;
3524 }
312de84d 3525
3526 if (stack == 0)
3527 abort ();
15bbde2b 3528 }
3529
6d7dc5b9 3530 if (!BARRIER_P (after)
3531 && !BARRIER_P (insn)
9dda7915 3532 && (bb = BLOCK_FOR_INSN (after)))
3533 {
3534 set_block_for_insn (insn, bb);
308f9b79 3535 if (INSN_P (insn))
d4c5e26d 3536 bb->flags |= BB_DIRTY;
9dda7915 3537 /* Should not happen as first in the BB is always
3fb1e43b 3538 either NOTE or LABEL. */
5496dbfc 3539 if (BB_END (bb) == after
9dda7915 3540 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3541 && !BARRIER_P (insn)
3542 && (!NOTE_P (insn)
9dda7915 3543 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
5496dbfc 3544 BB_END (bb) = insn;
9dda7915 3545 }
3546
15bbde2b 3547 NEXT_INSN (after) = insn;
6d7dc5b9 3548 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
15bbde2b 3549 {
3550 rtx sequence = PATTERN (after);
3551 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3552 }
3553}
3554
312de84d 3555/* Add INSN into the doubly-linked list before insn BEFORE. This and
3556 the previous should be the only functions called to insert an insn once
f65c10c0 3557 delay slots have been filled since only they know how to update a
312de84d 3558 SEQUENCE. */
3559
3560void
35cb5232 3561add_insn_before (rtx insn, rtx before)
312de84d 3562{
3563 rtx prev = PREV_INSN (before);
9dda7915 3564 basic_block bb;
312de84d 3565
9ea33026 3566 if (optimize && INSN_DELETED_P (before))
f65c10c0 3567 abort ();
3568
312de84d 3569 PREV_INSN (insn) = prev;
3570 NEXT_INSN (insn) = before;
3571
3572 if (prev)
3573 {
3574 NEXT_INSN (prev) = insn;
6d7dc5b9 3575 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
312de84d 3576 {
3577 rtx sequence = PATTERN (prev);
3578 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3579 }
3580 }
3581 else if (first_insn == before)
3582 first_insn = insn;
3583 else
3584 {
0a893c29 3585 struct sequence_stack *stack = seq_stack;
312de84d 3586 /* Scan all pending sequences too. */
3587 for (; stack; stack = stack->next)
3588 if (before == stack->first)
398f4855 3589 {
3590 stack->first = insn;
3591 break;
3592 }
312de84d 3593
3594 if (stack == 0)
3595 abort ();
3596 }
3597
6d7dc5b9 3598 if (!BARRIER_P (before)
3599 && !BARRIER_P (insn)
9dda7915 3600 && (bb = BLOCK_FOR_INSN (before)))
3601 {
3602 set_block_for_insn (insn, bb);
308f9b79 3603 if (INSN_P (insn))
d4c5e26d 3604 bb->flags |= BB_DIRTY;
9dda7915 3605 /* Should not happen as first in the BB is always
3fb1e43b 3606	 either NOTE or LABEL. */
5496dbfc 3607 if (BB_HEAD (bb) == insn
9dda7915 3608 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3609 && !BARRIER_P (insn)
3610 && (!NOTE_P (insn)
9dda7915 3611 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3612 abort ();
3613 }
3614
312de84d 3615 PREV_INSN (before) = insn;
6d7dc5b9 3616 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
312de84d 3617 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3618}
3619
7ddcf2bf 3620/* Remove an insn from its doubly-linked list. This function knows how
3621 to handle sequences. */
3622void
35cb5232 3623remove_insn (rtx insn)
7ddcf2bf 3624{
3625 rtx next = NEXT_INSN (insn);
3626 rtx prev = PREV_INSN (insn);
e4bf866d 3627 basic_block bb;
3628
7ddcf2bf 3629 if (prev)
3630 {
3631 NEXT_INSN (prev) = next;
6d7dc5b9 3632 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 3633 {
3634 rtx sequence = PATTERN (prev);
3635 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3636 }
3637 }
3638 else if (first_insn == insn)
3639 first_insn = next;
3640 else
3641 {
0a893c29 3642 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3643 /* Scan all pending sequences too. */
3644 for (; stack; stack = stack->next)
3645 if (insn == stack->first)
3646 {
3647 stack->first = next;
3648 break;
3649 }
3650
3651 if (stack == 0)
3652 abort ();
3653 }
3654
3655 if (next)
3656 {
3657 PREV_INSN (next) = prev;
6d7dc5b9 3658 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 3659 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3660 }
3661 else if (last_insn == insn)
3662 last_insn = prev;
3663 else
3664 {
0a893c29 3665 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3666 /* Scan all pending sequences too. */
3667 for (; stack; stack = stack->next)
3668 if (insn == stack->last)
3669 {
3670 stack->last = prev;
3671 break;
3672 }
3673
3674 if (stack == 0)
3675 abort ();
3676 }
6d7dc5b9 3677 if (!BARRIER_P (insn)
e4bf866d 3678 && (bb = BLOCK_FOR_INSN (insn)))
3679 {
308f9b79 3680 if (INSN_P (insn))
d4c5e26d 3681 bb->flags |= BB_DIRTY;
5496dbfc 3682 if (BB_HEAD (bb) == insn)
e4bf866d 3683 {
f4aee538 3684 /* Never ever delete the basic block note without deleting whole
3685 basic block. */
6d7dc5b9 3686 if (NOTE_P (insn))
e4bf866d 3687 abort ();
5496dbfc 3688 BB_HEAD (bb) = next;
e4bf866d 3689 }
5496dbfc 3690 if (BB_END (bb) == insn)
3691 BB_END (bb) = prev;
e4bf866d 3692 }
7ddcf2bf 3693}
3694
d5f9786f 3695/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3696
3697void
35cb5232 3698add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 3699{
6d7dc5b9 3700 if (! call_insn || !CALL_P (call_insn))
d5f9786f 3701 abort ();
3702
3703 /* Put the register usage information on the CALL. If there is already
3704 some usage information, put ours at the end. */
3705 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3706 {
3707 rtx link;
3708
3709 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3710 link = XEXP (link, 1))
3711 ;
3712
3713 XEXP (link, 1) = call_fusage;
3714 }
3715 else
3716 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3717}
3718
15bbde2b 3719/* Delete all insns made since FROM.
3720 FROM becomes the new last instruction. */
3721
3722void
35cb5232 3723delete_insns_since (rtx from)
15bbde2b 3724{
3725 if (from == 0)
3726 first_insn = 0;
3727 else
3728 NEXT_INSN (from) = 0;
3729 last_insn = from;
3730}
3731
34e2ddcd 3732/* This function is deprecated; please use sequences instead.
3733
3734 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 3735 The insns to be moved are those between FROM and TO.
3736 They are moved to a new position after the insn AFTER.
3737 AFTER must not be FROM or TO or any insn in between.
3738
3739 This function does not know about SEQUENCEs and hence should not be
3740 called after delay-slot filling has been done. */
3741
3742void
35cb5232 3743reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 3744{
3745 /* Splice this bunch out of where it is now. */
3746 if (PREV_INSN (from))
3747 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3748 if (NEXT_INSN (to))
3749 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3750 if (last_insn == to)
3751 last_insn = PREV_INSN (from);
3752 if (first_insn == from)
3753 first_insn = NEXT_INSN (to);
3754
3755 /* Make the new neighbors point to it and it to them. */
3756 if (NEXT_INSN (after))
3757 PREV_INSN (NEXT_INSN (after)) = to;
3758
3759 NEXT_INSN (to) = NEXT_INSN (after);
3760 PREV_INSN (from) = after;
3761 NEXT_INSN (after) = from;
3762 if (after == last_insn)
3763 last_insn = to;
3764}
3765
9dda7915 3766/* Same as function above, but take care to update BB boundaries. */
3767void
35cb5232 3768reorder_insns (rtx from, rtx to, rtx after)
9dda7915 3769{
3770 rtx prev = PREV_INSN (from);
3771 basic_block bb, bb2;
3772
3773 reorder_insns_nobb (from, to, after);
3774
6d7dc5b9 3775 if (!BARRIER_P (after)
9dda7915 3776 && (bb = BLOCK_FOR_INSN (after)))
3777 {
3778 rtx x;
308f9b79 3779 bb->flags |= BB_DIRTY;
d4c5e26d 3780
6d7dc5b9 3781 if (!BARRIER_P (from)
9dda7915 3782 && (bb2 = BLOCK_FOR_INSN (from)))
3783 {
5496dbfc 3784 if (BB_END (bb2) == to)
3785 BB_END (bb2) = prev;
308f9b79 3786 bb2->flags |= BB_DIRTY;
9dda7915 3787 }
3788
5496dbfc 3789 if (BB_END (bb) == after)
3790 BB_END (bb) = to;
9dda7915 3791
3792 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 3793 if (!BARRIER_P (x))
3794 set_block_for_insn (x, bb);
9dda7915 3795 }
3796}
3797
15bbde2b 3798/* Return the line note insn preceding INSN. */
3799
3800static rtx
35cb5232 3801find_line_note (rtx insn)
15bbde2b 3802{
3803 if (no_line_numbers)
3804 return 0;
3805
3806 for (; insn; insn = PREV_INSN (insn))
6d7dc5b9 3807 if (NOTE_P (insn)
ff385626 3808 && NOTE_LINE_NUMBER (insn) >= 0)
15bbde2b 3809 break;
3810
3811 return insn;
3812}
3813
0a78547b 3814/* Remove unnecessary notes from the instruction stream. */
90b89d2c 3815
3816void
35cb5232 3817remove_unnecessary_notes (void)
90b89d2c 3818{
92cfc4a8 3819 rtx block_stack = NULL_RTX;
3820 rtx eh_stack = NULL_RTX;
90b89d2c 3821 rtx insn;
3822 rtx next;
92cfc4a8 3823 rtx tmp;
90b89d2c 3824
f1ab82be 3825 /* We must not remove the first instruction in the function because
3826 the compiler depends on the first instruction being a note. */
90b89d2c 3827 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3828 {
3829 /* Remember what's next. */
3830 next = NEXT_INSN (insn);
3831
3832 /* We're only interested in notes. */
6d7dc5b9 3833 if (!NOTE_P (insn))
90b89d2c 3834 continue;
3835
92cfc4a8 3836 switch (NOTE_LINE_NUMBER (insn))
5846cb0f 3837 {
92cfc4a8 3838 case NOTE_INSN_DELETED:
3839 remove_insn (insn);
3840 break;
3841
3842 case NOTE_INSN_EH_REGION_BEG:
3843 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3844 break;
3845
3846 case NOTE_INSN_EH_REGION_END:
3847 /* Too many end notes. */
3848 if (eh_stack == NULL_RTX)
3849 abort ();
3850 /* Mismatched nesting. */
3851 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3852 abort ();
3853 tmp = eh_stack;
3854 eh_stack = XEXP (eh_stack, 1);
3855 free_INSN_LIST_node (tmp);
3856 break;
3857
3858 case NOTE_INSN_BLOCK_BEG:
3859 /* By now, all notes indicating lexical blocks should have
3860 NOTE_BLOCK filled in. */
3861 if (NOTE_BLOCK (insn) == NULL_TREE)
3862 abort ();
3863 block_stack = alloc_INSN_LIST (insn, block_stack);
3864 break;
3865
3866 case NOTE_INSN_BLOCK_END:
3867 /* Too many end notes. */
3868 if (block_stack == NULL_RTX)
3869 abort ();
3870 /* Mismatched nesting. */
3871 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3872 abort ();
3873 tmp = block_stack;
3874 block_stack = XEXP (block_stack, 1);
3875 free_INSN_LIST_node (tmp);
3876
5846cb0f 3877 /* Scan back to see if there are any non-note instructions
3878 between INSN and the beginning of this block. If not,
3879 then there is no PC range in the generated code that will
3880 actually be in this block, so there's no point in
3881 remembering the existence of the block. */
d4c5e26d 3882 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
5846cb0f 3883 {
3884 /* This block contains a real instruction. Note that we
3885 don't include labels; if the only thing in the block
3886 is a label, then there are still no PC values that
3887 lie within the block. */
92cfc4a8 3888 if (INSN_P (tmp))
5846cb0f 3889 break;
3890
3891 /* We're only interested in NOTEs. */
6d7dc5b9 3892 if (!NOTE_P (tmp))
5846cb0f 3893 continue;
3894
92cfc4a8 3895 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
5846cb0f 3896 {
b29760a8 3897 /* We just verified that this BLOCK matches us with
3898 the block_stack check above. Never delete the
3899 BLOCK for the outermost scope of the function; we
3900 can refer to names from that scope even if the
3901 block notes are messed up. */
3902 if (! is_body_block (NOTE_BLOCK (insn))
3903 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
e33dbbdf 3904 {
92cfc4a8 3905 remove_insn (tmp);
e33dbbdf 3906 remove_insn (insn);
3907 }
5846cb0f 3908 break;
3909 }
92cfc4a8 3910 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
5846cb0f 3911 /* There's a nested block. We need to leave the
3912 current block in place since otherwise the debugger
3913 wouldn't be able to show symbols from our block in
3914 the nested block. */
3915 break;
3916 }
3917 }
90b89d2c 3918 }
92cfc4a8 3919
3920 /* Too many begin notes. */
3921 if (block_stack || eh_stack)
3922 abort ();
90b89d2c 3923}
3924
15bbde2b 3925\f
31d3e01c 3926/* Emit insn(s) of given code and pattern
3927 at a specified place within the doubly-linked list.
15bbde2b 3928
31d3e01c 3929 All of the emit_foo global entry points accept an object
3930 X which is either an insn list or a PATTERN of a single
3931 instruction.
15bbde2b 3932
31d3e01c 3933 There are thus a few canonical ways to generate code and
3934 emit it at a specific place in the instruction stream. For
3935 example, consider the instruction named SPOT and the fact that
3936 we would like to emit some instructions before SPOT. We might
3937 do it like this:
15bbde2b 3938
31d3e01c 3939 start_sequence ();
3940 ... emit the new instructions ...
3941 insns_head = get_insns ();
3942 end_sequence ();
15bbde2b 3943
31d3e01c 3944 emit_insn_before (insns_head, SPOT);
15bbde2b 3945
31d3e01c 3946 It used to be common to generate SEQUENCE rtl instead, but that
3947 is a relic of the past which no longer occurs. The reason is that
3948 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3949 generated would almost certainly die right after it was created. */
15bbde2b 3950
31d3e01c 3951/* Make X be output before the instruction BEFORE. */
15bbde2b 3952
3953rtx
35cb5232 3954emit_insn_before (rtx x, rtx before)
15bbde2b 3955{
31d3e01c 3956 rtx last = before;
19cb6b50 3957 rtx insn;
15bbde2b 3958
31d3e01c 3959#ifdef ENABLE_RTL_CHECKING
3960 if (before == NULL_RTX)
3961 abort ();
3962#endif
3963
3964 if (x == NULL_RTX)
3965 return last;
3966
3967 switch (GET_CODE (x))
15bbde2b 3968 {
31d3e01c 3969 case INSN:
3970 case JUMP_INSN:
3971 case CALL_INSN:
3972 case CODE_LABEL:
3973 case BARRIER:
3974 case NOTE:
3975 insn = x;
3976 while (insn)
3977 {
3978 rtx next = NEXT_INSN (insn);
3979 add_insn_before (insn, before);
3980 last = insn;
3981 insn = next;
3982 }
3983 break;
3984
3985#ifdef ENABLE_RTL_CHECKING
3986 case SEQUENCE:
3987 abort ();
3988 break;
3989#endif
3990
3991 default:
3992 last = make_insn_raw (x);
3993 add_insn_before (last, before);
3994 break;
15bbde2b 3995 }
3996
31d3e01c 3997 return last;
15bbde2b 3998}
3999
31d3e01c 4000/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4001 and output it before the instruction BEFORE. */
4002
4003rtx
35cb5232 4004emit_jump_insn_before (rtx x, rtx before)
15bbde2b 4005{
d90b3d04 4006 rtx insn, last = NULL_RTX;
6e911104 4007
31d3e01c 4008#ifdef ENABLE_RTL_CHECKING
4009 if (before == NULL_RTX)
4010 abort ();
4011#endif
4012
4013 switch (GET_CODE (x))
6e911104 4014 {
31d3e01c 4015 case INSN:
4016 case JUMP_INSN:
4017 case CALL_INSN:
4018 case CODE_LABEL:
4019 case BARRIER:
4020 case NOTE:
4021 insn = x;
4022 while (insn)
4023 {
4024 rtx next = NEXT_INSN (insn);
4025 add_insn_before (insn, before);
4026 last = insn;
4027 insn = next;
4028 }
4029 break;
4030
4031#ifdef ENABLE_RTL_CHECKING
4032 case SEQUENCE:
4033 abort ();
4034 break;
4035#endif
4036
4037 default:
4038 last = make_jump_insn_raw (x);
4039 add_insn_before (last, before);
4040 break;
6e911104 4041 }
4042
31d3e01c 4043 return last;
15bbde2b 4044}
4045
31d3e01c 4046/* Make an instruction with body X and code CALL_INSN
cd0fe062 4047 and output it before the instruction BEFORE. */
4048
4049rtx
35cb5232 4050emit_call_insn_before (rtx x, rtx before)
cd0fe062 4051{
d90b3d04 4052 rtx last = NULL_RTX, insn;
cd0fe062 4053
31d3e01c 4054#ifdef ENABLE_RTL_CHECKING
4055 if (before == NULL_RTX)
4056 abort ();
4057#endif
4058
4059 switch (GET_CODE (x))
cd0fe062 4060 {
31d3e01c 4061 case INSN:
4062 case JUMP_INSN:
4063 case CALL_INSN:
4064 case CODE_LABEL:
4065 case BARRIER:
4066 case NOTE:
4067 insn = x;
4068 while (insn)
4069 {
4070 rtx next = NEXT_INSN (insn);
4071 add_insn_before (insn, before);
4072 last = insn;
4073 insn = next;
4074 }
4075 break;
4076
4077#ifdef ENABLE_RTL_CHECKING
4078 case SEQUENCE:
4079 abort ();
4080 break;
4081#endif
4082
4083 default:
4084 last = make_call_insn_raw (x);
4085 add_insn_before (last, before);
4086 break;
cd0fe062 4087 }
4088
31d3e01c 4089 return last;
cd0fe062 4090}
4091
15bbde2b 4092/* Make an insn of code BARRIER
71caadc0 4093 and output it before the insn BEFORE. */
15bbde2b 4094
4095rtx
35cb5232 4096emit_barrier_before (rtx before)
15bbde2b 4097{
19cb6b50 4098 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4099
4100 INSN_UID (insn) = cur_insn_uid++;
4101
312de84d 4102 add_insn_before (insn, before);
15bbde2b 4103 return insn;
4104}
4105
71caadc0 4106/* Emit the label LABEL before the insn BEFORE. */
4107
4108rtx
35cb5232 4109emit_label_before (rtx label, rtx before)
71caadc0 4110{
4111 /* This can be called twice for the same label as a result of the
4112 confusion that follows a syntax error! So make it harmless. */
4113 if (INSN_UID (label) == 0)
4114 {
4115 INSN_UID (label) = cur_insn_uid++;
4116 add_insn_before (label, before);
4117 }
4118
4119 return label;
4120}
4121
15bbde2b 4122/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4123
4124rtx
35cb5232 4125emit_note_before (int subtype, rtx before)
15bbde2b 4126{
19cb6b50 4127 rtx note = rtx_alloc (NOTE);
15bbde2b 4128 INSN_UID (note) = cur_insn_uid++;
7bd3dcc4 4129#ifndef USE_MAPPED_LOCATION
15bbde2b 4130 NOTE_SOURCE_FILE (note) = 0;
7bd3dcc4 4131#endif
15bbde2b 4132 NOTE_LINE_NUMBER (note) = subtype;
ab87d1bc 4133 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4134
312de84d 4135 add_insn_before (note, before);
15bbde2b 4136 return note;
4137}
4138\f
31d3e01c 4139/* Helper for emit_insn_after; handles lists of instructions
4140 efficiently. */
15bbde2b 4141
35cb5232 4142static rtx emit_insn_after_1 (rtx, rtx);
31d3e01c 4143
4144static rtx
35cb5232 4145emit_insn_after_1 (rtx first, rtx after)
15bbde2b 4146{
31d3e01c 4147 rtx last;
4148 rtx after_after;
4149 basic_block bb;
15bbde2b 4150
6d7dc5b9 4151 if (!BARRIER_P (after)
31d3e01c 4152 && (bb = BLOCK_FOR_INSN (after)))
15bbde2b 4153 {
31d3e01c 4154 bb->flags |= BB_DIRTY;
4155 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4156 if (!BARRIER_P (last))
31d3e01c 4157 set_block_for_insn (last, bb);
6d7dc5b9 4158 if (!BARRIER_P (last))
31d3e01c 4159 set_block_for_insn (last, bb);
5496dbfc 4160 if (BB_END (bb) == after)
4161 BB_END (bb) = last;
15bbde2b 4162 }
4163 else
31d3e01c 4164 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4165 continue;
4166
4167 after_after = NEXT_INSN (after);
4168
4169 NEXT_INSN (after) = first;
4170 PREV_INSN (first) = after;
4171 NEXT_INSN (last) = after_after;
4172 if (after_after)
4173 PREV_INSN (after_after) = last;
4174
4175 if (after == last_insn)
4176 last_insn = last;
4177 return last;
4178}
4179
4180/* Make X be output after the insn AFTER. */
4181
4182rtx
35cb5232 4183emit_insn_after (rtx x, rtx after)
31d3e01c 4184{
4185 rtx last = after;
4186
4187#ifdef ENABLE_RTL_CHECKING
4188 if (after == NULL_RTX)
4189 abort ();
4190#endif
4191
4192 if (x == NULL_RTX)
4193 return last;
4194
4195 switch (GET_CODE (x))
15bbde2b 4196 {
31d3e01c 4197 case INSN:
4198 case JUMP_INSN:
4199 case CALL_INSN:
4200 case CODE_LABEL:
4201 case BARRIER:
4202 case NOTE:
4203 last = emit_insn_after_1 (x, after);
4204 break;
4205
4206#ifdef ENABLE_RTL_CHECKING
4207 case SEQUENCE:
4208 abort ();
4209 break;
4210#endif
4211
4212 default:
4213 last = make_insn_raw (x);
4214 add_insn_after (last, after);
4215 break;
15bbde2b 4216 }
4217
31d3e01c 4218 return last;
15bbde2b 4219}
4220
1bea98fb 4221/* Similar to emit_insn_after, except that line notes are to be inserted so
4222 as to act as if this insn were at FROM. */
4223
4224void
35cb5232 4225emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
1bea98fb 4226{
4227 rtx from_line = find_line_note (from);
4228 rtx after_line = find_line_note (after);
31d3e01c 4229 rtx insn = emit_insn_after (x, after);
1bea98fb 4230
4231 if (from_line)
2f57e3d9 4232 emit_note_copy_after (from_line, after);
1bea98fb 4233
4234 if (after_line)
2f57e3d9 4235 emit_note_copy_after (after_line, insn);
1bea98fb 4236}
4237
31d3e01c 4238/* Make an insn of code JUMP_INSN with body X
15bbde2b 4239 and output it after the insn AFTER. */
4240
4241rtx
35cb5232 4242emit_jump_insn_after (rtx x, rtx after)
15bbde2b 4243{
31d3e01c 4244 rtx last;
15bbde2b 4245
31d3e01c 4246#ifdef ENABLE_RTL_CHECKING
4247 if (after == NULL_RTX)
4248 abort ();
4249#endif
4250
4251 switch (GET_CODE (x))
15bbde2b 4252 {
31d3e01c 4253 case INSN:
4254 case JUMP_INSN:
4255 case CALL_INSN:
4256 case CODE_LABEL:
4257 case BARRIER:
4258 case NOTE:
4259 last = emit_insn_after_1 (x, after);
4260 break;
4261
4262#ifdef ENABLE_RTL_CHECKING
4263 case SEQUENCE:
4264 abort ();
4265 break;
4266#endif
4267
4268 default:
4269 last = make_jump_insn_raw (x);
4270 add_insn_after (last, after);
4271 break;
15bbde2b 4272 }
4273
31d3e01c 4274 return last;
4275}
4276
4277/* Make an instruction with body X and code CALL_INSN
4278 and output it after the instruction AFTER. */
4279
4280rtx
35cb5232 4281emit_call_insn_after (rtx x, rtx after)
31d3e01c 4282{
4283 rtx last;
4284
4285#ifdef ENABLE_RTL_CHECKING
4286 if (after == NULL_RTX)
4287 abort ();
4288#endif
4289
4290 switch (GET_CODE (x))
4291 {
4292 case INSN:
4293 case JUMP_INSN:
4294 case CALL_INSN:
4295 case CODE_LABEL:
4296 case BARRIER:
4297 case NOTE:
4298 last = emit_insn_after_1 (x, after);
4299 break;
4300
4301#ifdef ENABLE_RTL_CHECKING
4302 case SEQUENCE:
4303 abort ();
4304 break;
4305#endif
4306
4307 default:
4308 last = make_call_insn_raw (x);
4309 add_insn_after (last, after);
4310 break;
4311 }
4312
4313 return last;
15bbde2b 4314}
4315
4316/* Make an insn of code BARRIER
4317 and output it after the insn AFTER. */
4318
4319rtx
35cb5232 4320emit_barrier_after (rtx after)
15bbde2b 4321{
19cb6b50 4322 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4323
4324 INSN_UID (insn) = cur_insn_uid++;
4325
4326 add_insn_after (insn, after);
4327 return insn;
4328}
4329
4330/* Emit the label LABEL after the insn AFTER. */
4331
4332rtx
35cb5232 4333emit_label_after (rtx label, rtx after)
15bbde2b 4334{
4335 /* This can be called twice for the same label
4336 as a result of the confusion that follows a syntax error!
4337 So make it harmless. */
4338 if (INSN_UID (label) == 0)
4339 {
4340 INSN_UID (label) = cur_insn_uid++;
4341 add_insn_after (label, after);
4342 }
4343
4344 return label;
4345}
4346
4347/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4348
4349rtx
35cb5232 4350emit_note_after (int subtype, rtx after)
15bbde2b 4351{
19cb6b50 4352 rtx note = rtx_alloc (NOTE);
15bbde2b 4353 INSN_UID (note) = cur_insn_uid++;
7bd3dcc4 4354#ifndef USE_MAPPED_LOCATION
15bbde2b 4355 NOTE_SOURCE_FILE (note) = 0;
7bd3dcc4 4356#endif
15bbde2b 4357 NOTE_LINE_NUMBER (note) = subtype;
ab87d1bc 4358 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4359 add_insn_after (note, after);
4360 return note;
4361}
4362
2f57e3d9 4363/* Emit a copy of note ORIG after the insn AFTER. */
15bbde2b 4364
4365rtx
2f57e3d9 4366emit_note_copy_after (rtx orig, rtx after)
15bbde2b 4367{
19cb6b50 4368 rtx note;
15bbde2b 4369
2f57e3d9 4370 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
15bbde2b 4371 {
4372 cur_insn_uid++;
4373 return 0;
4374 }
4375
d4c5e26d 4376 note = rtx_alloc (NOTE);
15bbde2b 4377 INSN_UID (note) = cur_insn_uid++;
2f57e3d9 4378 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4379 NOTE_DATA (note) = NOTE_DATA (orig);
ab87d1bc 4380 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4381 add_insn_after (note, after);
4382 return note;
4383}
4384\f
13751393 4385/* Like emit_insn_after, but set INSN_LOCATOR according to LOC. */
d321a68b 4386rtx
35cb5232 4387emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4388{
4389 rtx last = emit_insn_after (pattern, after);
d321a68b 4390
ca154f3f 4391 if (pattern == NULL_RTX)
4392 return last;
4393
31d3e01c 4394 after = NEXT_INSN (after);
4395 while (1)
4396 {
59d19cb8 4397 if (active_insn_p (after))
13751393 4398 INSN_LOCATOR (after) = loc;
31d3e01c 4399 if (after == last)
4400 break;
4401 after = NEXT_INSN (after);
4402 }
d321a68b 4403 return last;
4404}
4405
13751393 4406/* Like emit_jump_insn_after, but set INSN_LOCATOR according to LOC. */
d321a68b 4407rtx
35cb5232 4408emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4409{
4410 rtx last = emit_jump_insn_after (pattern, after);
31d3e01c 4411
ca154f3f 4412 if (pattern == NULL_RTX)
4413 return last;
4414
31d3e01c 4415 after = NEXT_INSN (after);
4416 while (1)
4417 {
59d19cb8 4418 if (active_insn_p (after))
13751393 4419 INSN_LOCATOR (after) = loc;
31d3e01c 4420 if (after == last)
4421 break;
4422 after = NEXT_INSN (after);
4423 }
d321a68b 4424 return last;
4425}
4426
13751393 4427/* Like emit_call_insn_after, but set INSN_LOCATOR according to LOC. */
d321a68b 4428rtx
35cb5232 4429emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4430{
4431 rtx last = emit_call_insn_after (pattern, after);
31d3e01c 4432
ca154f3f 4433 if (pattern == NULL_RTX)
4434 return last;
4435
31d3e01c 4436 after = NEXT_INSN (after);
4437 while (1)
4438 {
59d19cb8 4439 if (active_insn_p (after))
13751393 4440 INSN_LOCATOR (after) = loc;
31d3e01c 4441 if (after == last)
4442 break;
4443 after = NEXT_INSN (after);
4444 }
d321a68b 4445 return last;
4446}
4447
13751393 4448/* Like emit_insn_before, but set INSN_LOCATOR according to LOC. */
d321a68b 4449rtx
35cb5232 4450emit_insn_before_setloc (rtx pattern, rtx before, int loc)
d321a68b 4451{
4452 rtx first = PREV_INSN (before);
4453 rtx last = emit_insn_before (pattern, before);
4454
ca154f3f 4455 if (pattern == NULL_RTX)
4456 return last;
4457
31d3e01c 4458 first = NEXT_INSN (first);
4459 while (1)
4460 {
59d19cb8 4461 if (active_insn_p (first))
13751393 4462 INSN_LOCATOR (first) = loc;
31d3e01c 4463 if (first == last)
4464 break;
4465 first = NEXT_INSN (first);
4466 }
d321a68b 4467 return last;
4468}
4469\f
31d3e01c 4470/* Take X and emit it at the end of the doubly-linked
4471 INSN list.
15bbde2b 4472
4473 Returns the last insn emitted. */
4474
4475rtx
35cb5232 4476emit_insn (rtx x)
15bbde2b 4477{
31d3e01c 4478 rtx last = last_insn;
4479 rtx insn;
15bbde2b 4480
31d3e01c 4481 if (x == NULL_RTX)
4482 return last;
15bbde2b 4483
31d3e01c 4484 switch (GET_CODE (x))
4485 {
4486 case INSN:
4487 case JUMP_INSN:
4488 case CALL_INSN:
4489 case CODE_LABEL:
4490 case BARRIER:
4491 case NOTE:
4492 insn = x;
4493 while (insn)
15bbde2b 4494 {
31d3e01c 4495 rtx next = NEXT_INSN (insn);
15bbde2b 4496 add_insn (insn);
31d3e01c 4497 last = insn;
4498 insn = next;
15bbde2b 4499 }
31d3e01c 4500 break;
15bbde2b 4501
31d3e01c 4502#ifdef ENABLE_RTL_CHECKING
4503 case SEQUENCE:
4504 abort ();
4505 break;
4506#endif
15bbde2b 4507
31d3e01c 4508 default:
4509 last = make_insn_raw (x);
4510 add_insn (last);
4511 break;
15bbde2b 4512 }
4513
4514 return last;
4515}
4516
31d3e01c 4517/* Make an insn of code JUMP_INSN with pattern X
4518 and add it to the end of the doubly-linked list. */
15bbde2b 4519
4520rtx
35cb5232 4521emit_jump_insn (rtx x)
15bbde2b 4522{
d90b3d04 4523 rtx last = NULL_RTX, insn;
15bbde2b 4524
31d3e01c 4525 switch (GET_CODE (x))
15bbde2b 4526 {
31d3e01c 4527 case INSN:
4528 case JUMP_INSN:
4529 case CALL_INSN:
4530 case CODE_LABEL:
4531 case BARRIER:
4532 case NOTE:
4533 insn = x;
4534 while (insn)
4535 {
4536 rtx next = NEXT_INSN (insn);
4537 add_insn (insn);
4538 last = insn;
4539 insn = next;
4540 }
4541 break;
b36b07d8 4542
31d3e01c 4543#ifdef ENABLE_RTL_CHECKING
4544 case SEQUENCE:
4545 abort ();
4546 break;
4547#endif
b36b07d8 4548
31d3e01c 4549 default:
4550 last = make_jump_insn_raw (x);
4551 add_insn (last);
4552 break;
9dda7915 4553 }
b36b07d8 4554
4555 return last;
4556}
4557
31d3e01c 4558/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4559 and add it to the end of the doubly-linked list. */
4560
4561rtx
35cb5232 4562emit_call_insn (rtx x)
15bbde2b 4563{
31d3e01c 4564 rtx insn;
4565
4566 switch (GET_CODE (x))
15bbde2b 4567 {
31d3e01c 4568 case INSN:
4569 case JUMP_INSN:
4570 case CALL_INSN:
4571 case CODE_LABEL:
4572 case BARRIER:
4573 case NOTE:
4574 insn = emit_insn (x);
4575 break;
15bbde2b 4576
31d3e01c 4577#ifdef ENABLE_RTL_CHECKING
4578 case SEQUENCE:
4579 abort ();
4580 break;
4581#endif
15bbde2b 4582
31d3e01c 4583 default:
4584 insn = make_call_insn_raw (x);
15bbde2b 4585 add_insn (insn);
31d3e01c 4586 break;
15bbde2b 4587 }
31d3e01c 4588
4589 return insn;
15bbde2b 4590}
4591
4592/* Add the label LABEL to the end of the doubly-linked list. */
4593
4594rtx
35cb5232 4595emit_label (rtx label)
15bbde2b 4596{
4597 /* This can be called twice for the same label
4598 as a result of the confusion that follows a syntax error!
4599 So make it harmless. */
4600 if (INSN_UID (label) == 0)
4601 {
4602 INSN_UID (label) = cur_insn_uid++;
4603 add_insn (label);
4604 }
4605 return label;
4606}
4607
4608/* Make an insn of code BARRIER
4609 and add it to the end of the doubly-linked list. */
4610
4611rtx
35cb5232 4612emit_barrier (void)
15bbde2b 4613{
19cb6b50 4614 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 4615 INSN_UID (barrier) = cur_insn_uid++;
4616 add_insn (barrier);
4617 return barrier;
4618}
4619
bccd9980 4620/* Make a line-number NOTE insn for LOCATION and add it to the end
 4621   of the doubly-linked list, but only if line numbers are desired for
 4622   debugging info and LOCATION doesn't match the previous one. */
15bbde2b 4623
4624rtx
bccd9980 4625emit_line_note (location_t location)
15bbde2b 4626{
31b97e8f 4627 rtx note;
bccd9980 4628
7bd3dcc4 4629#ifdef USE_MAPPED_LOCATION
4630 if (location == last_location)
4631 return NULL_RTX;
4632#else
bccd9980 4633 if (location.file && last_location.file
4634 && !strcmp (location.file, last_location.file)
4635 && location.line == last_location.line)
0a3b3d88 4636 return NULL_RTX;
7bd3dcc4 4637#endif
bccd9980 4638 last_location = location;
4639
15bbde2b 4640 if (no_line_numbers)
0a3b3d88 4641 {
4642 cur_insn_uid++;
4643 return NULL_RTX;
4644 }
15bbde2b 4645
7bd3dcc4 4646#ifdef USE_MAPPED_LOCATION
4647 note = emit_note ((int) location);
4648#else
bccd9980 4649 note = emit_note (location.line);
4650 NOTE_SOURCE_FILE (note) = location.file;
7bd3dcc4 4651#endif
2f57e3d9 4652
4653 return note;
4654}
4655
4656/* Emit a copy of note ORIG. */
35cb5232 4657
2f57e3d9 4658rtx
4659emit_note_copy (rtx orig)
4660{
4661 rtx note;
4662
4663 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4664 {
4665 cur_insn_uid++;
4666 return NULL_RTX;
4667 }
4668
4669 note = rtx_alloc (NOTE);
4670
4671 INSN_UID (note) = cur_insn_uid++;
4672 NOTE_DATA (note) = NOTE_DATA (orig);
4673 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4674 BLOCK_FOR_INSN (note) = NULL;
4675 add_insn (note);
4676
31b97e8f 4677 return note;
15bbde2b 4678}
4679
31b97e8f 4680/* Make an insn of code NOTE or type NOTE_NO
4681 and add it to the end of the doubly-linked list. */
15bbde2b 4682
4683rtx
35cb5232 4684emit_note (int note_no)
15bbde2b 4685{
19cb6b50 4686 rtx note;
15bbde2b 4687
15bbde2b 4688 note = rtx_alloc (NOTE);
4689 INSN_UID (note) = cur_insn_uid++;
31b97e8f 4690 NOTE_LINE_NUMBER (note) = note_no;
6c7786cb 4691 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ab87d1bc 4692 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4693 add_insn (note);
4694 return note;
4695}
4696
15bbde2b 4697/* Cause next statement to emit a line note even if the line number
bccd9980 4698 has not changed. */
15bbde2b 4699
4700void
35cb5232 4701force_next_line_note (void)
15bbde2b 4702{
7bd3dcc4 4703#ifdef USE_MAPPED_LOCATION
4704 last_location = -1;
4705#else
0a3b3d88 4706 last_location.line = -1;
7bd3dcc4 4707#endif
15bbde2b 4708}
f1934a33 4709
4710/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 4711   note of this type already exists, its datum is replaced. */
f1934a33 4712
c080d8f0 4713rtx
35cb5232 4714set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 4715{
4716 rtx note = find_reg_note (insn, kind, NULL_RTX);
4717
7e6224ab 4718 switch (kind)
4719 {
4720 case REG_EQUAL:
4721 case REG_EQUIV:
4722 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4723 has multiple sets (some callers assume single_set
4724 means the insn only has one set, when in fact it
4725 means the insn only has one * useful * set). */
4726 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4727 {
4728 if (note)
4729 abort ();
4730 return NULL_RTX;
4731 }
4732
4733 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4734 It serves no useful purpose and breaks eliminate_regs. */
4735 if (GET_CODE (datum) == ASM_OPERANDS)
4736 return NULL_RTX;
4737 break;
4738
4739 default:
4740 break;
4741 }
c080d8f0 4742
d823ba47 4743 if (note)
c080d8f0 4744 {
4745 XEXP (note, 0) = datum;
4746 return note;
4747 }
f1934a33 4748
4749 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
c080d8f0 4750 return REG_NOTES (insn);
f1934a33 4751}
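
/* For illustration only (a sketch, not code from this file): a pass
   that knows the value computed by an insn could record that fact as
   a REG_EQUAL note; TARGET, SRC and VALUE are hypothetical.

     rtx insn = emit_move_insn (target, src);
     set_unique_reg_note (insn, REG_EQUAL, value);  */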
15bbde2b 4752\f
4753/* Return an indication of which type of insn should have X as a body.
4754 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4755
4756enum rtx_code
35cb5232 4757classify_insn (rtx x)
15bbde2b 4758{
6d7dc5b9 4759 if (LABEL_P (x))
15bbde2b 4760 return CODE_LABEL;
4761 if (GET_CODE (x) == CALL)
4762 return CALL_INSN;
4763 if (GET_CODE (x) == RETURN)
4764 return JUMP_INSN;
4765 if (GET_CODE (x) == SET)
4766 {
4767 if (SET_DEST (x) == pc_rtx)
4768 return JUMP_INSN;
4769 else if (GET_CODE (SET_SRC (x)) == CALL)
4770 return CALL_INSN;
4771 else
4772 return INSN;
4773 }
4774 if (GET_CODE (x) == PARALLEL)
4775 {
19cb6b50 4776 int j;
15bbde2b 4777 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4778 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4779 return CALL_INSN;
4780 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4781 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4782 return JUMP_INSN;
4783 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4784 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4785 return CALL_INSN;
4786 }
4787 return INSN;
4788}
4789
4790/* Emit the rtl pattern X as an appropriate kind of insn.
4791 If X is a label, it is simply added into the insn chain. */
4792
4793rtx
35cb5232 4794emit (rtx x)
15bbde2b 4795{
4796 enum rtx_code code = classify_insn (x);
4797
4798 if (code == CODE_LABEL)
4799 return emit_label (x);
4800 else if (code == INSN)
4801 return emit_insn (x);
4802 else if (code == JUMP_INSN)
4803 {
19cb6b50 4804 rtx insn = emit_jump_insn (x);
b2816317 4805 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
15bbde2b 4806 return emit_barrier ();
4807 return insn;
4808 }
4809 else if (code == CALL_INSN)
4810 return emit_call_insn (x);
4811 else
4812 abort ();
4813}
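
/* For illustration only (a sketch, not code from this file): given an
   unconditional-jump pattern, classify_insn returns JUMP_INSN, so
   `emit' creates a JUMP_INSN and, because any_uncondjump_p holds,
   follows it with a barrier.  LABEL is hypothetical and would still
   need to be placed with emit_label.

     rtx label = gen_label_rtx ();
     rtx pat = gen_rtx_SET (VOIDmode, pc_rtx,
                            gen_rtx_LABEL_REF (VOIDmode, label));
     emit (pat);  */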
4814\f
1f3233d1 4815/* Space for free sequence stack entries. */
7035b2ab 4816static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 4817
735f4358 4818/* Begin emitting insns to a sequence. If this sequence will contain
4819 something that might cause the compiler to pop arguments to function
4820 calls (because those pops have previously been deferred; see
4821 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4822 before calling this function. That will ensure that the deferred
4823 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 4824
4825void
35cb5232 4826start_sequence (void)
15bbde2b 4827{
4828 struct sequence_stack *tem;
4829
1f3233d1 4830 if (free_sequence_stack != NULL)
4831 {
4832 tem = free_sequence_stack;
4833 free_sequence_stack = tem->next;
4834 }
4835 else
f0af5a88 4836 tem = ggc_alloc (sizeof (struct sequence_stack));
15bbde2b 4837
0a893c29 4838 tem->next = seq_stack;
15bbde2b 4839 tem->first = first_insn;
4840 tem->last = last_insn;
4841
0a893c29 4842 seq_stack = tem;
15bbde2b 4843
4844 first_insn = 0;
4845 last_insn = 0;
4846}
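
/* For illustration only (a sketch, not code from this file): the
   typical pairing with do_pending_stack_adjust described above; the
   emitted move, REG and INSN are hypothetical.

     rtx seq;

     do_pending_stack_adjust ();
     start_sequence ();
     emit_move_insn (reg, GEN_INT (0));
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insn);  */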
4847
b49854c6 4848/* Set up the insn chain starting with FIRST as the current sequence,
4849 saving the previously current one. See the documentation for
4850 start_sequence for more information about how to use this function. */
15bbde2b 4851
4852void
35cb5232 4853push_to_sequence (rtx first)
15bbde2b 4854{
4855 rtx last;
4856
4857 start_sequence ();
4858
4859 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4860
4861 first_insn = first;
4862 last_insn = last;
4863}
4864
78147e84 4865/* Set up the insn chain from a chain stort in FIRST to LAST. */
4866
4867void
35cb5232 4868push_to_full_sequence (rtx first, rtx last)
78147e84 4869{
4870 start_sequence ();
4871 first_insn = first;
4872 last_insn = last;
4873 /* We really should have the end of the insn chain here. */
4874 if (last && NEXT_INSN (last))
4875 abort ();
4876}
4877
ab74c92f 4878/* Set up the outer-level insn chain
4879 as the current sequence, saving the previously current one. */
4880
4881void
35cb5232 4882push_topmost_sequence (void)
ab74c92f 4883{
2041cfd9 4884 struct sequence_stack *stack, *top = NULL;
ab74c92f 4885
4886 start_sequence ();
4887
0a893c29 4888 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 4889 top = stack;
4890
4891 first_insn = top->first;
4892 last_insn = top->last;
4893}
4894
4895/* After emitting to the outer-level insn chain, update the outer-level
4896 insn chain, and restore the previous saved state. */
4897
4898void
35cb5232 4899pop_topmost_sequence (void)
ab74c92f 4900{
2041cfd9 4901 struct sequence_stack *stack, *top = NULL;
ab74c92f 4902
0a893c29 4903 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 4904 top = stack;
4905
4906 top->first = first_insn;
4907 top->last = last_insn;
4908
4909 end_sequence ();
4910}
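
/* For illustration only (a sketch, not code from this file): the two
   functions above are used as a pair to emit into the outermost insn
   chain from within a nested sequence; PAT is hypothetical.

     push_topmost_sequence ();
     emit_insn (pat);
     pop_topmost_sequence ();  */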
4911
15bbde2b 4912/* After emitting to a sequence, restore previous saved state.
4913
b49854c6 4914 To get the contents of the sequence just made, you must call
31d3e01c 4915 `get_insns' *before* calling here.
b49854c6 4916
4917 If the compiler might have deferred popping arguments while
4918 generating this sequence, and this sequence will not be immediately
4919 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 4920 before calling get_insns. That will ensure that the deferred
b49854c6 4921 pops are inserted into this sequence, and not into some random
4922 location in the instruction stream. See INHIBIT_DEFER_POP for more
4923 information about deferred popping of arguments. */
15bbde2b 4924
4925void
35cb5232 4926end_sequence (void)
15bbde2b 4927{
0a893c29 4928 struct sequence_stack *tem = seq_stack;
15bbde2b 4929
4930 first_insn = tem->first;
4931 last_insn = tem->last;
0a893c29 4932 seq_stack = tem->next;
15bbde2b 4933
1f3233d1 4934 memset (tem, 0, sizeof (*tem));
4935 tem->next = free_sequence_stack;
4936 free_sequence_stack = tem;
15bbde2b 4937}
4938
4939/* Return 1 if currently emitting into a sequence. */
4940
4941int
35cb5232 4942in_sequence_p (void)
15bbde2b 4943{
0a893c29 4944 return seq_stack != 0;
15bbde2b 4945}
15bbde2b 4946\f
02ebfa52 4947/* Put the various virtual registers into REGNO_REG_RTX. */
4948
4949void
35cb5232 4950init_virtual_regs (struct emit_status *es)
02ebfa52 4951{
0a893c29 4952 rtx *ptr = es->x_regno_reg_rtx;
4953 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4954 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4955 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4956 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4957 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4958}
4959
928d57e3 4960\f
4961/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4962static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4963static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4964static int copy_insn_n_scratches;
4965
4966/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4967 copied an ASM_OPERANDS.
4968 In that case, it is the original input-operand vector. */
4969static rtvec orig_asm_operands_vector;
4970
4971/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4972 copied an ASM_OPERANDS.
4973 In that case, it is the copied input-operand vector. */
4974static rtvec copy_asm_operands_vector;
4975
4976/* Likewise for the constraints vector. */
4977static rtvec orig_asm_constraints_vector;
4978static rtvec copy_asm_constraints_vector;
4979
4980/* Recursively create a new copy of an rtx for copy_insn.
4981 This function differs from copy_rtx in that it handles SCRATCHes and
4982 ASM_OPERANDs properly.
4983 Normally, this function is not used directly; use copy_insn as front end.
4984 However, you could first copy an insn pattern with copy_insn and then use
4985 this function afterwards to properly copy any REG_NOTEs containing
4986 SCRATCHes. */
4987
4988rtx
35cb5232 4989copy_insn_1 (rtx orig)
928d57e3 4990{
19cb6b50 4991 rtx copy;
4992 int i, j;
4993 RTX_CODE code;
4994 const char *format_ptr;
928d57e3 4995
4996 code = GET_CODE (orig);
4997
4998 switch (code)
4999 {
5000 case REG:
928d57e3 5001 case CONST_INT:
5002 case CONST_DOUBLE:
886cfd4f 5003 case CONST_VECTOR:
928d57e3 5004 case SYMBOL_REF:
5005 case CODE_LABEL:
5006 case PC:
5007 case CC0:
928d57e3 5008 return orig;
c09425a0 5009 case CLOBBER:
5010 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5011 return orig;
5012 break;
928d57e3 5013
5014 case SCRATCH:
5015 for (i = 0; i < copy_insn_n_scratches; i++)
5016 if (copy_insn_scratch_in[i] == orig)
5017 return copy_insn_scratch_out[i];
5018 break;
5019
5020 case CONST:
5021 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5022 a LABEL_REF, it isn't sharable. */
5023 if (GET_CODE (XEXP (orig, 0)) == PLUS
5024 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5025 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5026 return orig;
5027 break;
d823ba47 5028
928d57e3 5029 /* A MEM with a constant address is not sharable. The problem is that
5030 the constant address may need to be reloaded. If the mem is shared,
5031 then reloading one copy of this mem will cause all copies to appear
5032 to have been reloaded. */
5033
5034 default:
5035 break;
5036 }
5037
5038 copy = rtx_alloc (code);
5039
5040 /* Copy the various flags, and other information. We assume that
5041 all fields need copying, and then clear the fields that should
5042 not be copied. That is the sensible default behavior, and forces
5043 us to explicitly document why we are *not* copying a flag. */
bf6b5685 5044 memcpy (copy, orig, RTX_HDR_SIZE);
928d57e3 5045
5046 /* We do not copy the USED flag, which is used as a mark bit during
5047 walks over the RTL. */
7c25cb91 5048 RTX_FLAG (copy, used) = 0;
928d57e3 5049
5050 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5051 if (INSN_P (orig))
928d57e3 5052 {
7c25cb91 5053 RTX_FLAG (copy, jump) = 0;
5054 RTX_FLAG (copy, call) = 0;
5055 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5056 }
d823ba47 5057
928d57e3 5058 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5059
5060 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5061 {
bf6b5685 5062 copy->u.fld[i] = orig->u.fld[i];
928d57e3 5063 switch (*format_ptr++)
5064 {
5065 case 'e':
928d57e3 5066 if (XEXP (orig, i) != NULL)
5067 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5068 break;
5069
928d57e3 5070 case 'E':
5071 case 'V':
928d57e3 5072 if (XVEC (orig, i) == orig_asm_constraints_vector)
5073 XVEC (copy, i) = copy_asm_constraints_vector;
5074 else if (XVEC (orig, i) == orig_asm_operands_vector)
5075 XVEC (copy, i) = copy_asm_operands_vector;
5076 else if (XVEC (orig, i) != NULL)
5077 {
5078 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5079 for (j = 0; j < XVECLEN (copy, i); j++)
5080 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5081 }
5082 break;
5083
928d57e3 5084 case 't':
928d57e3 5085 case 'w':
928d57e3 5086 case 'i':
928d57e3 5087 case 's':
5088 case 'S':
d925550d 5089 case 'u':
5090 case '0':
5091 /* These are left unchanged. */
928d57e3 5092 break;
5093
5094 default:
5095 abort ();
5096 }
5097 }
5098
5099 if (code == SCRATCH)
5100 {
5101 i = copy_insn_n_scratches++;
5102 if (i >= MAX_RECOG_OPERANDS)
5103 abort ();
5104 copy_insn_scratch_in[i] = orig;
5105 copy_insn_scratch_out[i] = copy;
5106 }
5107 else if (code == ASM_OPERANDS)
5108 {
d91f2122 5109 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5110 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5111 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5112 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5113 }
5114
5115 return copy;
5116}
5117
5118/* Create a new copy of an rtx.
5119 This function differs from copy_rtx in that it handles SCRATCHes and
5120 ASM_OPERANDs properly.
5121 INSN doesn't really have to be a full INSN; it could be just the
5122 pattern. */
5123rtx
35cb5232 5124copy_insn (rtx insn)
928d57e3 5125{
5126 copy_insn_n_scratches = 0;
5127 orig_asm_operands_vector = 0;
5128 orig_asm_constraints_vector = 0;
5129 copy_asm_operands_vector = 0;
5130 copy_asm_constraints_vector = 0;
5131 return copy_insn_1 (insn);
5132}
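
/* For illustration only (a sketch, not code from this file): copying
   a pattern with copy_insn gives the duplicate fresh SCRATCHes rather
   than sharing the originals; emit_copy_of_insn_after below is a real
   consumer of this idiom.

     rtx pat = copy_insn (PATTERN (insn));
     rtx dup = emit_insn_after (pat, insn);  */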
02ebfa52 5133
15bbde2b 5134/* Initialize data structures and variables in this file
5135 before generating rtl for each function. */
5136
5137void
35cb5232 5138init_emit (void)
15bbde2b 5139{
08513b52 5140 struct function *f = cfun;
15bbde2b 5141
f0af5a88 5142 f->emit = ggc_alloc (sizeof (struct emit_status));
15bbde2b 5143 first_insn = NULL;
5144 last_insn = NULL;
5145 cur_insn_uid = 1;
5146 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
7bd3dcc4 5147 last_location = UNKNOWN_LOCATION;
15bbde2b 5148 first_label_num = label_num;
5149 last_label_num = 0;
0a893c29 5150 seq_stack = NULL;
15bbde2b 5151
15bbde2b 5152 /* Init the tables that describe all the pseudo regs. */
5153
e61a0a7f 5154 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5155
0a893c29 5156 f->emit->regno_pointer_align
f0af5a88 5157 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5158 * sizeof (unsigned char));
d4c332ff 5159
d823ba47 5160 regno_reg_rtx
f0af5a88 5161 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
fcdc122e 5162
936082bb 5163 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5164 memcpy (regno_reg_rtx,
5165 static_regno_reg_rtx,
5166 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5167
15bbde2b 5168 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
0a893c29 5169 init_virtual_regs (f->emit);
888e0d33 5170
5171 /* Indicate that the virtual registers and stack locations are
5172 all pointers. */
e61a0a7f 5173 REG_POINTER (stack_pointer_rtx) = 1;
5174 REG_POINTER (frame_pointer_rtx) = 1;
5175 REG_POINTER (hard_frame_pointer_rtx) = 1;
5176 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5177
e61a0a7f 5178 REG_POINTER (virtual_incoming_args_rtx) = 1;
5179 REG_POINTER (virtual_stack_vars_rtx) = 1;
5180 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5181 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5182 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5183
d4c332ff 5184#ifdef STACK_BOUNDARY
80909c64 5185 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5186 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5187 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5188 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5189
5190 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5191 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5192 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5193 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5194 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5195#endif
5196
89525da0 5197#ifdef INIT_EXPANDERS
5198 INIT_EXPANDERS;
5199#endif
15bbde2b 5200}
5201
89dd3424 5202/* Generate the constant 0. */
886cfd4f 5203
5204static rtx
35cb5232 5205gen_const_vector_0 (enum machine_mode mode)
886cfd4f 5206{
5207 rtx tem;
5208 rtvec v;
5209 int units, i;
5210 enum machine_mode inner;
5211
5212 units = GET_MODE_NUNITS (mode);
5213 inner = GET_MODE_INNER (mode);
5214
5215 v = rtvec_alloc (units);
5216
 5217	  /* CONST0_RTX (inner) must already be set before this function is called. */
5218 if (!CONST0_RTX (inner))
5219 abort ();
5220
5221 for (i = 0; i < units; ++i)
5222 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5223
9426b612 5224 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5225 return tem;
5226}
5227
9426b612 5228/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5229 all elements are zero. */
5230rtx
35cb5232 5231gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
9426b612 5232{
5233 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5234 int i;
5235
5236 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5237 if (RTVEC_ELT (v, i) != inner_zero)
5238 return gen_rtx_raw_CONST_VECTOR (mode, v);
5239 return CONST0_RTX (mode);
5240}
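
/* For illustration only (a sketch, not code from this file): building
   an all-zero vector returns the shared constant; V4SImode is only
   available on targets that support it.

     rtvec v = rtvec_alloc (GET_MODE_NUNITS (V4SImode));
     int i;

     for (i = 0; i < GET_MODE_NUNITS (V4SImode); i++)
       RTVEC_ELT (v, i) = const0_rtx;
     ... gen_rtx_CONST_VECTOR (V4SImode, v) == CONST0_RTX (V4SImode) ...  */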
5241
15bbde2b 5242/* Create some permanent unique rtl objects shared between all functions.
5243 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5244
5245void
35cb5232 5246init_emit_once (int line_numbers)
15bbde2b 5247{
5248 int i;
5249 enum machine_mode mode;
9e042f31 5250 enum machine_mode double_mode;
15bbde2b 5251
7981458e 5252 /* We need reg_raw_mode, so initialize the modes now. */
c235b58a 5253 init_reg_modes_once ();
5254
2ff23ed0 5255 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5256 tables. */
573aba85 5257 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5258 const_int_htab_eq, NULL);
c6259b83 5259
573aba85 5260 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5261 const_double_htab_eq, NULL);
2ff23ed0 5262
573aba85 5263 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5264 mem_attrs_htab_eq, NULL);
ca74b940 5265 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5266 reg_attrs_htab_eq, NULL);
77695070 5267
15bbde2b 5268 no_line_numbers = ! line_numbers;
5269
71d7daa2 5270 /* Compute the word and byte modes. */
5271
5272 byte_mode = VOIDmode;
5273 word_mode = VOIDmode;
5274 double_mode = VOIDmode;
5275
5276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5277 mode = GET_MODE_WIDER_MODE (mode))
5278 {
5279 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5280 && byte_mode == VOIDmode)
5281 byte_mode = mode;
5282
5283 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5284 && word_mode == VOIDmode)
5285 word_mode = mode;
5286 }
5287
71d7daa2 5288 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5289 mode = GET_MODE_WIDER_MODE (mode))
5290 {
5291 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5292 && double_mode == VOIDmode)
5293 double_mode = mode;
5294 }
5295
5296 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5297
57c097d5 5298 /* Assign register numbers to the globally defined register rtx.
5299 This must be done at runtime because the register number field
5300 is in a union and some compilers can't initialize unions. */
5301
27a9551b 5302 pc_rtx = gen_rtx_PC (VOIDmode);
5303 cc0_rtx = gen_rtx_CC0 (VOIDmode);
22cf44bc 5304 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5305 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
57c097d5 5306 if (hard_frame_pointer_rtx == 0)
d823ba47 5307 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
22cf44bc 5308 HARD_FRAME_POINTER_REGNUM);
57c097d5 5309 if (arg_pointer_rtx == 0)
22cf44bc 5310 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
d823ba47 5311 virtual_incoming_args_rtx =
22cf44bc 5312 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
d823ba47 5313 virtual_stack_vars_rtx =
22cf44bc 5314 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
d823ba47 5315 virtual_stack_dynamic_rtx =
22cf44bc 5316 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
d823ba47 5317 virtual_outgoing_args_rtx =
5318 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
22cf44bc 5319 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
57c097d5 5320
90295bd2 5321 /* Initialize RTL for commonly used hard registers. These are
5322 copied into regno_reg_rtx as we begin to compile each function. */
5323 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5324 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5325
57c097d5 5326#ifdef INIT_EXPANDERS
ab5beff9 5327 /* This is to initialize {init|mark|free}_machine_status before the first
5328 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5329 end which calls push_function_context_to before the first call to
57c097d5 5330 init_function_start. */
5331 INIT_EXPANDERS;
5332#endif
5333
15bbde2b 5334 /* Create the unique rtx's for certain rtx codes and operand values. */
5335
8fd5918e 5336 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
7014838c 5337 tries to use these variables. */
15bbde2b 5338 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5339 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5340 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5341
1a60f06a 5342 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5343 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5344 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5345 else
3ad7bb1c 5346 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5347
2ff23ed0 5348 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5349 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5350 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
8918c507 5351 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5352 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
2ff23ed0 5353 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
77e89269 5354 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5355
5356 dconsthalf = dconst1;
9d96125b 5357 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5358
8918c507 5359 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5360
79082756 5361 /* Initialize mathematical constants for constant folding builtins.
 5362     These constants need to be given to at least 160 bits of precision. */
5363 real_from_string (&dconstpi,
5364 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5365 real_from_string (&dconste,
5366 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5367
8918c507 5368 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
15bbde2b 5369 {
badfe841 5370 REAL_VALUE_TYPE *r =
5371 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5372
15bbde2b 5373 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5374 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5375 const_tiny_rtx[i][(int) mode] =
5376 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
15bbde2b 5377
b572011e 5378 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5379
5380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5381 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5382 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5383
5384 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5385 mode != VOIDmode;
5386 mode = GET_MODE_WIDER_MODE (mode))
5387 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5388 }
5389
886cfd4f 5390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5391 mode != VOIDmode;
5392 mode = GET_MODE_WIDER_MODE (mode))
89dd3424 5393 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
886cfd4f 5394
5395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5396 mode != VOIDmode;
5397 mode = GET_MODE_WIDER_MODE (mode))
89dd3424 5398 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
886cfd4f 5399
0fd4500a 5400 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5401 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5402 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 5403
065336b4 5404 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5405 if (STORE_FLAG_VALUE == 1)
5406 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5407
f4bffa58 5408#ifdef RETURN_ADDRESS_POINTER_REGNUM
5409 return_address_pointer_rtx
22cf44bc 5410 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
f4bffa58 5411#endif
5412
f4bffa58 5413#ifdef STATIC_CHAIN_REGNUM
5414 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5415
5416#ifdef STATIC_CHAIN_INCOMING_REGNUM
5417 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5418 static_chain_incoming_rtx
5419 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5420 else
5421#endif
5422 static_chain_incoming_rtx = static_chain_rtx;
5423#endif
5424
5425#ifdef STATIC_CHAIN
5426 static_chain_rtx = STATIC_CHAIN;
5427
5428#ifdef STATIC_CHAIN_INCOMING
5429 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5430#else
5431 static_chain_incoming_rtx = static_chain_rtx;
5432#endif
5433#endif
5434
3473aefe 5435 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
52bcea50 5436 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
15bbde2b 5437}
ac6c481d 5438\f
cd0fe062 5439/* Produce an exact duplicate of insn INSN after AFTER.
 5440   Take care to update libcall regions if present. */
5441
5442rtx
35cb5232 5443emit_copy_of_insn_after (rtx insn, rtx after)
cd0fe062 5444{
5445 rtx new;
5446 rtx note1, note2, link;
5447
5448 switch (GET_CODE (insn))
5449 {
5450 case INSN:
5451 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5452 break;
5453
5454 case JUMP_INSN:
5455 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5456 break;
5457
5458 case CALL_INSN:
5459 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5460 if (CALL_INSN_FUNCTION_USAGE (insn))
5461 CALL_INSN_FUNCTION_USAGE (new)
5462 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5463 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5464 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5465 break;
5466
5467 default:
5468 abort ();
5469 }
5470
5471 /* Update LABEL_NUSES. */
5472 mark_jump_label (PATTERN (new), new, 0);
5473
13751393 5474 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
ab87d1bc 5475
cd0fe062 5476 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5477 make them. */
5478 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5479 if (REG_NOTE_KIND (link) != REG_LABEL)
5480 {
5481 if (GET_CODE (link) == EXPR_LIST)
5482 REG_NOTES (new)
5483 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5484 XEXP (link, 0),
5485 REG_NOTES (new)));
5486 else
5487 REG_NOTES (new)
5488 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5489 XEXP (link, 0),
5490 REG_NOTES (new)));
5491 }
5492
5493 /* Fix the libcall sequences. */
5494 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5495 {
5496 rtx p = new;
5497 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5498 p = PREV_INSN (p);
5499 XEXP (note1, 0) = p;
5500 XEXP (note2, 0) = new;
5501 }
ce07bc35 5502 INSN_CODE (new) = INSN_CODE (insn);
cd0fe062 5503 return new;
5504}
1f3233d1 5505
7035b2ab 5506static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 5507rtx
5508gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5509{
5510 if (hard_reg_clobbers[mode][regno])
5511 return hard_reg_clobbers[mode][regno];
5512 else
5513 return (hard_reg_clobbers[mode][regno] =
5514 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5515}
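
/* For illustration only (a sketch, not code from this file): repeated
   requests for the same (mode, regno) pair yield the same shared rtx,
   so results may be compared with pointer equality; register 0 is
   hypothetical.

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     ... c1 == c2 ...  */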
5516
1f3233d1 5517#include "gt-emit-rtl.h"