]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
* gcc.target/arm/pr40835.c: Require a thumb1 target, do not force
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
bccafa26 1/* Emit RTL for the GCC expander.
ddca3e9d 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
e077413c 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
535664e3 4 Free Software Foundation, Inc.
15bbde2b 5
f12b58b3 6This file is part of GCC.
15bbde2b 7
f12b58b3 8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
f12b58b3 11version.
15bbde2b 12
f12b58b3 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
15bbde2b 17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
15bbde2b 21
22
23/* Middle-to-low level generation of rtx code and insns.
24
74efa612 25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
15bbde2b 27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
74efa612 30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
8fd5918e 33 dependent is the kind of rtx's they make and what arguments they
34 use. */
15bbde2b 35
36#include "config.h"
405711de 37#include "system.h"
805e22b2 38#include "coretypes.h"
39#include "tm.h"
d3b64f2d 40#include "toplev.h"
15bbde2b 41#include "rtl.h"
3fd7e17f 42#include "tree.h"
7953c610 43#include "tm_p.h"
15bbde2b 44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
c6b6c51f 48#include "hard-reg-set.h"
73f5c1e3 49#include "hashtab.h"
15bbde2b 50#include "insn-config.h"
0dbd1c74 51#include "recog.h"
15bbde2b 52#include "real.h"
06f0b99c 53#include "fixed-value.h"
a3426c4c 54#include "bitmap.h"
f3d96a58 55#include "basic-block.h"
a7b0c170 56#include "ggc.h"
b29760a8 57#include "debug.h"
b0278d39 58#include "langhooks.h"
77fce4cd 59#include "tree-pass.h"
3072d30e 60#include "df.h"
9845d120 61#include "params.h"
98155838 62#include "target.h"
649d8da6 63
399d45d3 64/* Commonly used modes. */
65
a92771b8 66enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
67enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 68enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 69enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 70
b079a207 71/* Datastructures maintained for currently processed function in RTL form. */
72
fd6ffb7c 73struct rtl_data x_rtl;
b079a207 74
75/* Indexed by pseudo register number, gives the rtx for that pseudo.
76 Allocated in parallel with regno_pointer_align.
77 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
78 with length attribute nested in top level structures. */
79
80rtx * regno_reg_rtx;
15bbde2b 81
82/* This is *not* reset after each function. It gives each CODE_LABEL
83 in the entire compilation a unique label number. */
84
9105005a 85static GTY(()) int label_num = 1;
15bbde2b 86
15bbde2b 87/* Commonly used rtx's, so that we only need space for one copy.
88 These are initialized once for the entire compilation.
2ff23ed0 89 All of these are unique; no other rtx-object will be equal to any
90 of these. */
15bbde2b 91
57c097d5 92rtx global_rtl[GR_MAX];
15bbde2b 93
90295bd2 94/* Commonly used RTL for hard registers. These objects are not necessarily
95 unique, so we allocate them separately from global_rtl. They are
96 initialized once per compilation unit, then copied into regno_reg_rtx
97 at the beginning of each function. */
98static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
15bbde2b 100/* We record floating-point CONST_DOUBLEs in each floating-point mode for
101 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
102 record a copy of const[012]_rtx. */
103
104rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
105
1a60f06a 106rtx const_true_rtx;
107
15bbde2b 108REAL_VALUE_TYPE dconst0;
109REAL_VALUE_TYPE dconst1;
110REAL_VALUE_TYPE dconst2;
111REAL_VALUE_TYPE dconstm1;
77e89269 112REAL_VALUE_TYPE dconsthalf;
15bbde2b 113
06f0b99c 114/* Record fixed-point constant 0 and 1. */
115FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
116FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
117
15bbde2b 118/* All references to the following fixed hard registers go through
119 these unique rtl objects. On machines where the frame-pointer and
120 arg-pointer are the same register, they use the same unique object.
121
122 After register allocation, other rtl objects which used to be pseudo-regs
123 may be clobbered to refer to the frame-pointer register.
124 But references that were originally to the frame-pointer can be
125 distinguished from the others because they contain frame_pointer_rtx.
126
90c25824 127 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
128 tricky: until register elimination has taken place hard_frame_pointer_rtx
d823ba47 129 should be used if it is being set, and frame_pointer_rtx otherwise. After
90c25824 130 register elimination hard_frame_pointer_rtx should always be used.
131 On machines where the two registers are same (most) then these are the
132 same.
133
15bbde2b 134 In an inline procedure, the stack and frame pointer rtxs may not be
135 used for anything else. */
15bbde2b 136rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
137
4b0c5859 138/* This is used to implement __builtin_return_address for some machines.
139 See for instance the MIPS port. */
140rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
141
15bbde2b 142/* We make one copy of (const_int C) where C is in
143 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
144 to save space during the compilation and simplify comparisons of
145 integers. */
146
57c097d5 147rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 148
73f5c1e3 149/* A hash table storing CONST_INTs whose absolute value is greater
150 than MAX_SAVED_CONST_INT. */
151
1f3233d1 152static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
153 htab_t const_int_htab;
73f5c1e3 154
c6259b83 155/* A hash table storing memory attribute structures. */
1f3233d1 156static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
157 htab_t mem_attrs_htab;
c6259b83 158
ca74b940 159/* A hash table storing register attribute structures. */
160static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
161 htab_t reg_attrs_htab;
162
2ff23ed0 163/* A hash table storing all CONST_DOUBLEs. */
1f3233d1 164static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
165 htab_t const_double_htab;
2ff23ed0 166
e397ad8e 167/* A hash table storing all CONST_FIXEDs. */
168static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_fixed_htab;
170
fd6ffb7c 171#define first_insn (crtl->emit.x_first_insn)
172#define last_insn (crtl->emit.x_last_insn)
173#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
9845d120 174#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
fd6ffb7c 175#define last_location (crtl->emit.x_last_location)
176#define first_label_num (crtl->emit.x_first_label_num)
15bbde2b 177
35cb5232 178static rtx make_call_insn_raw (rtx);
35cb5232 179static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
265be050 180static void set_used_decls (tree);
35cb5232 181static void mark_label_nuses (rtx);
182static hashval_t const_int_htab_hash (const void *);
183static int const_int_htab_eq (const void *, const void *);
184static hashval_t const_double_htab_hash (const void *);
185static int const_double_htab_eq (const void *, const void *);
186static rtx lookup_const_double (rtx);
e397ad8e 187static hashval_t const_fixed_htab_hash (const void *);
188static int const_fixed_htab_eq (const void *, const void *);
189static rtx lookup_const_fixed (rtx);
35cb5232 190static hashval_t mem_attrs_htab_hash (const void *);
191static int mem_attrs_htab_eq (const void *, const void *);
32c2fdea 192static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
bd1a81f7 193 addr_space_t, enum machine_mode);
35cb5232 194static hashval_t reg_attrs_htab_hash (const void *);
195static int reg_attrs_htab_eq (const void *, const void *);
196static reg_attrs *get_reg_attrs (tree, int);
6e68dcb2 197static rtx gen_const_vector (enum machine_mode, int);
0e0727c4 198static void copy_rtx_if_shared_1 (rtx *orig);
73f5c1e3 199
3cd757b1 200/* Probability of the conditional branch currently proceeded by try_split.
201 Set to -1 otherwise. */
202int split_branch_probability = -1;
649d8da6 203\f
73f5c1e3 204/* Returns a hash code for X (which is a really a CONST_INT). */
205
206static hashval_t
35cb5232 207const_int_htab_hash (const void *x)
73f5c1e3 208{
dd9b9fc5 209 return (hashval_t) INTVAL ((const_rtx) x);
73f5c1e3 210}
211
6ef828f9 212/* Returns nonzero if the value represented by X (which is really a
73f5c1e3 213 CONST_INT) is the same as that given by Y (which is really a
214 HOST_WIDE_INT *). */
215
216static int
35cb5232 217const_int_htab_eq (const void *x, const void *y)
73f5c1e3 218{
dd9b9fc5 219 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
2ff23ed0 220}
221
222/* Returns a hash code for X (which is really a CONST_DOUBLE). */
223static hashval_t
35cb5232 224const_double_htab_hash (const void *x)
2ff23ed0 225{
dd9b9fc5 226 const_rtx const value = (const_rtx) x;
3393215f 227 hashval_t h;
2ff23ed0 228
3393215f 229 if (GET_MODE (value) == VOIDmode)
230 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
231 else
a5760913 232 {
e2e205b3 233 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
a5760913 234 /* MODE is used in the comparison, so it should be in the hash. */
235 h ^= GET_MODE (value);
236 }
2ff23ed0 237 return h;
238}
239
6ef828f9 240/* Returns nonzero if the value represented by X (really a ...)
2ff23ed0 241 is the same as that represented by Y (really a ...) */
242static int
35cb5232 243const_double_htab_eq (const void *x, const void *y)
2ff23ed0 244{
dd9b9fc5 245 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
2ff23ed0 246
247 if (GET_MODE (a) != GET_MODE (b))
248 return 0;
f82a103d 249 if (GET_MODE (a) == VOIDmode)
250 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
251 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
252 else
253 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
254 CONST_DOUBLE_REAL_VALUE (b));
73f5c1e3 255}
256
e397ad8e 257/* Returns a hash code for X (which is really a CONST_FIXED). */
258
259static hashval_t
260const_fixed_htab_hash (const void *x)
261{
a9f1838b 262 const_rtx const value = (const_rtx) x;
e397ad8e 263 hashval_t h;
264
265 h = fixed_hash (CONST_FIXED_VALUE (value));
266 /* MODE is used in the comparison, so it should be in the hash. */
267 h ^= GET_MODE (value);
268 return h;
269}
270
271/* Returns nonzero if the value represented by X (really a ...)
272 is the same as that represented by Y (really a ...). */
273
274static int
275const_fixed_htab_eq (const void *x, const void *y)
276{
a9f1838b 277 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
e397ad8e 278
279 if (GET_MODE (a) != GET_MODE (b))
280 return 0;
281 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
282}
283
c6259b83 284/* Returns a hash code for X (which is a really a mem_attrs *). */
285
286static hashval_t
35cb5232 287mem_attrs_htab_hash (const void *x)
c6259b83 288{
dd9b9fc5 289 const mem_attrs *const p = (const mem_attrs *) x;
c6259b83 290
291 return (p->alias ^ (p->align * 1000)
bd1a81f7 292 ^ (p->addrspace * 4000)
c6259b83 293 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
294 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
2f16183e 295 ^ (size_t) iterative_hash_expr (p->expr, 0));
c6259b83 296}
297
6ef828f9 298/* Returns nonzero if the value represented by X (which is really a
c6259b83 299 mem_attrs *) is the same as that given by Y (which is also really a
300 mem_attrs *). */
73f5c1e3 301
302static int
35cb5232 303mem_attrs_htab_eq (const void *x, const void *y)
73f5c1e3 304{
aae87fc3 305 const mem_attrs *const p = (const mem_attrs *) x;
306 const mem_attrs *const q = (const mem_attrs *) y;
c6259b83 307
2f16183e 308 return (p->alias == q->alias && p->offset == q->offset
309 && p->size == q->size && p->align == q->align
bd1a81f7 310 && p->addrspace == q->addrspace
2f16183e 311 && (p->expr == q->expr
312 || (p->expr != NULL_TREE && q->expr != NULL_TREE
313 && operand_equal_p (p->expr, q->expr, 0))));
73f5c1e3 314}
315
c6259b83 316/* Allocate a new mem_attrs structure and insert it into the hash table if
5cc193e7 317 one identical to it is not already in the table. We are doing this for
318 MEM of mode MODE. */
c6259b83 319
320static mem_attrs *
32c2fdea 321get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
bd1a81f7 322 unsigned int align, addr_space_t addrspace, enum machine_mode mode)
c6259b83 323{
324 mem_attrs attrs;
325 void **slot;
326
d5c80165 327 /* If everything is the default, we can just return zero.
328 This must match what the corresponding MEM_* macros return when the
329 field is not present. */
bd1a81f7 330 if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
5cc193e7 331 && (size == 0
332 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
d5c80165 333 && (STRICT_ALIGNMENT && mode != BLKmode
334 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
5cc193e7 335 return 0;
336
c6259b83 337 attrs.alias = alias;
b10dbbca 338 attrs.expr = expr;
c6259b83 339 attrs.offset = offset;
340 attrs.size = size;
341 attrs.align = align;
bd1a81f7 342 attrs.addrspace = addrspace;
c6259b83 343
344 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
345 if (*slot == 0)
346 {
347 *slot = ggc_alloc (sizeof (mem_attrs));
348 memcpy (*slot, &attrs, sizeof (mem_attrs));
349 }
350
2457c754 351 return (mem_attrs *) *slot;
73f5c1e3 352}
353
ca74b940 354/* Returns a hash code for X (which is a really a reg_attrs *). */
355
356static hashval_t
35cb5232 357reg_attrs_htab_hash (const void *x)
ca74b940 358{
aae87fc3 359 const reg_attrs *const p = (const reg_attrs *) x;
ca74b940 360
361 return ((p->offset * 1000) ^ (long) p->decl);
362}
363
7ef5b942 364/* Returns nonzero if the value represented by X (which is really a
ca74b940 365 reg_attrs *) is the same as that given by Y (which is also really a
366 reg_attrs *). */
367
368static int
35cb5232 369reg_attrs_htab_eq (const void *x, const void *y)
ca74b940 370{
aae87fc3 371 const reg_attrs *const p = (const reg_attrs *) x;
372 const reg_attrs *const q = (const reg_attrs *) y;
ca74b940 373
374 return (p->decl == q->decl && p->offset == q->offset);
375}
376/* Allocate a new reg_attrs structure and insert it into the hash table if
377 one identical to it is not already in the table. We are doing this for
378 MEM of mode MODE. */
379
380static reg_attrs *
35cb5232 381get_reg_attrs (tree decl, int offset)
ca74b940 382{
383 reg_attrs attrs;
384 void **slot;
385
386 /* If everything is the default, we can just return zero. */
387 if (decl == 0 && offset == 0)
388 return 0;
389
390 attrs.decl = decl;
391 attrs.offset = offset;
392
393 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
394 if (*slot == 0)
395 {
396 *slot = ggc_alloc (sizeof (reg_attrs));
397 memcpy (*slot, &attrs, sizeof (reg_attrs));
398 }
399
2457c754 400 return (reg_attrs *) *slot;
ca74b940 401}
402
3072d30e 403
404#if !HAVE_blockage
405/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
406 across this insn. */
407
408rtx
409gen_blockage (void)
410{
411 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
412 MEM_VOLATILE_P (x) = true;
413 return x;
414}
415#endif
416
417
22cf44bc 418/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
419 don't attempt to share with the various global pieces of rtl (such as
420 frame_pointer_rtx). */
421
422rtx
35cb5232 423gen_raw_REG (enum machine_mode mode, int regno)
22cf44bc 424{
425 rtx x = gen_rtx_raw_REG (mode, regno);
426 ORIGINAL_REGNO (x) = regno;
427 return x;
428}
429
7014838c 430/* There are some RTL codes that require special attention; the generation
431 functions do the raw handling. If you add to this list, modify
432 special_rtx in gengenrtl.c as well. */
433
3ad7bb1c 434rtx
35cb5232 435gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3ad7bb1c 436{
73f5c1e3 437 void **slot;
438
3ad7bb1c 439 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 440 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 441
442#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
443 if (const_true_rtx && arg == STORE_FLAG_VALUE)
444 return const_true_rtx;
445#endif
446
73f5c1e3 447 /* Look up the CONST_INT in the hash table. */
2b3dbc20 448 slot = htab_find_slot_with_hash (const_int_htab, &arg,
449 (hashval_t) arg, INSERT);
7f2875d3 450 if (*slot == 0)
d7c47c0e 451 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 452
453 return (rtx) *slot;
3ad7bb1c 454}
455
2d232d05 456rtx
35cb5232 457gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2d232d05 458{
459 return GEN_INT (trunc_int_for_mode (c, mode));
460}
461
2ff23ed0 462/* CONST_DOUBLEs might be created from pairs of integers, or from
463 REAL_VALUE_TYPEs. Also, their length is known only at run time,
464 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
465
466/* Determine whether REAL, a CONST_DOUBLE, already exists in the
467 hash table. If so, return its counterpart; otherwise add it
468 to the hash table and return it. */
469static rtx
35cb5232 470lookup_const_double (rtx real)
2ff23ed0 471{
472 void **slot = htab_find_slot (const_double_htab, real, INSERT);
473 if (*slot == 0)
474 *slot = real;
475
476 return (rtx) *slot;
477}
7f2875d3 478
2ff23ed0 479/* Return a CONST_DOUBLE rtx for a floating-point value specified by
480 VALUE in mode MODE. */
67f2a2eb 481rtx
35cb5232 482const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
67f2a2eb 483{
2ff23ed0 484 rtx real = rtx_alloc (CONST_DOUBLE);
485 PUT_MODE (real, mode);
486
e8aaae4e 487 real->u.rv = value;
2ff23ed0 488
489 return lookup_const_double (real);
490}
491
e397ad8e 492/* Determine whether FIXED, a CONST_FIXED, already exists in the
493 hash table. If so, return its counterpart; otherwise add it
494 to the hash table and return it. */
495
496static rtx
497lookup_const_fixed (rtx fixed)
498{
499 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
500 if (*slot == 0)
501 *slot = fixed;
502
503 return (rtx) *slot;
504}
505
506/* Return a CONST_FIXED rtx for a fixed-point value specified by
507 VALUE in mode MODE. */
508
509rtx
510const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
511{
512 rtx fixed = rtx_alloc (CONST_FIXED);
513 PUT_MODE (fixed, mode);
514
515 fixed->u.fv = value;
516
517 return lookup_const_fixed (fixed);
518}
519
2ff23ed0 520/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
521 of ints: I0 is the low-order word and I1 is the high-order word.
522 Do not use this routine for non-integer modes; convert to
523 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
524
525rtx
35cb5232 526immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
2ff23ed0 527{
528 rtx value;
529 unsigned int i;
530
b1ca4af4 531 /* There are the following cases (note that there are no modes with
532 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
533
534 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
535 gen_int_mode.
536 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
537 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
538 from copies of the sign bit, and sign of i0 and i1 are the same), then
539 we return a CONST_INT for i0.
540 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
2ff23ed0 541 if (mode != VOIDmode)
542 {
611234b4 543 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
544 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
545 /* We can get a 0 for an error mark. */
546 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
547 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
2ff23ed0 548
b1ca4af4 549 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
550 return gen_int_mode (i0, mode);
551
552 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
2ff23ed0 553 }
554
555 /* If this integer fits in one word, return a CONST_INT. */
556 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
557 return GEN_INT (i0);
558
559 /* We use VOIDmode for integers. */
560 value = rtx_alloc (CONST_DOUBLE);
561 PUT_MODE (value, VOIDmode);
562
563 CONST_DOUBLE_LOW (value) = i0;
564 CONST_DOUBLE_HIGH (value) = i1;
565
566 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
567 XWINT (value, i) = 0;
568
569 return lookup_const_double (value);
67f2a2eb 570}
571
3ad7bb1c 572rtx
35cb5232 573gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3ad7bb1c 574{
575 /* In case the MD file explicitly references the frame pointer, have
576 all such references point to the same frame pointer. This is
577 used during frame pointer elimination to distinguish the explicit
578 references to these registers from pseudos that happened to be
579 assigned to them.
580
581 If we have eliminated the frame pointer or arg pointer, we will
582 be using it as a normal register, for example as a spill
583 register. In such cases, we might be accessing it in a mode that
584 is not Pmode and therefore cannot use the pre-allocated rtx.
585
586 Also don't do this when we are making new REGs in reload, since
587 we don't want to get confused with the real pointers. */
588
589 if (mode == Pmode && !reload_in_progress)
590 {
71801afc 591 if (regno == FRAME_POINTER_REGNUM
592 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 593 return frame_pointer_rtx;
594#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
71801afc 595 if (regno == HARD_FRAME_POINTER_REGNUM
596 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 597 return hard_frame_pointer_rtx;
598#endif
599#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
e8b59353 600 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 601 return arg_pointer_rtx;
602#endif
603#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 604 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 605 return return_address_pointer_rtx;
606#endif
3473aefe 607 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
6ea47475 608 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
d4c5e26d 609 return pic_offset_table_rtx;
e8b59353 610 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 611 return stack_pointer_rtx;
612 }
613
32b53d83 614#if 0
90295bd2 615 /* If the per-function register table has been set up, try to re-use
32b53d83 616 an existing entry in that table to avoid useless generation of RTL.
617
618 This code is disabled for now until we can fix the various backends
619 which depend on having non-shared hard registers in some cases. Long
620 term we want to re-enable this code as it can significantly cut down
71801afc 621 on the amount of useless RTL that gets generated.
622
623 We'll also need to fix some code that runs after reload that wants to
624 set ORIGINAL_REGNO. */
625
90295bd2 626 if (cfun
627 && cfun->emit
628 && regno_reg_rtx
629 && regno < FIRST_PSEUDO_REGISTER
630 && reg_raw_mode[regno] == mode)
631 return regno_reg_rtx[regno];
32b53d83 632#endif
90295bd2 633
22cf44bc 634 return gen_raw_REG (mode, regno);
3ad7bb1c 635}
636
b5ba9f3a 637rtx
35cb5232 638gen_rtx_MEM (enum machine_mode mode, rtx addr)
b5ba9f3a 639{
640 rtx rt = gen_rtx_raw_MEM (mode, addr);
641
642 /* This field is not cleared by the mere allocation of the rtx, so
643 we clear it here. */
c6259b83 644 MEM_ATTRS (rt) = 0;
b5ba9f3a 645
646 return rt;
647}
701e46d0 648
e265a6da 649/* Generate a memory referring to non-trapping constant memory. */
650
651rtx
652gen_const_mem (enum machine_mode mode, rtx addr)
653{
654 rtx mem = gen_rtx_MEM (mode, addr);
655 MEM_READONLY_P (mem) = 1;
656 MEM_NOTRAP_P (mem) = 1;
657 return mem;
658}
659
00060fc2 660/* Generate a MEM referring to fixed portions of the frame, e.g., register
661 save areas. */
662
663rtx
664gen_frame_mem (enum machine_mode mode, rtx addr)
665{
666 rtx mem = gen_rtx_MEM (mode, addr);
667 MEM_NOTRAP_P (mem) = 1;
668 set_mem_alias_set (mem, get_frame_alias_set ());
669 return mem;
670}
671
672/* Generate a MEM referring to a temporary use of the stack, not part
673 of the fixed stack frame. For example, something which is pushed
674 by a target splitter. */
675rtx
676gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
677{
678 rtx mem = gen_rtx_MEM (mode, addr);
679 MEM_NOTRAP_P (mem) = 1;
18d50ae6 680 if (!cfun->calls_alloca)
00060fc2 681 set_mem_alias_set (mem, get_frame_alias_set ());
682 return mem;
683}
684
2166bbaa 685/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
686 this construct would be valid, and false otherwise. */
687
688bool
689validate_subreg (enum machine_mode omode, enum machine_mode imode,
7ecb5bb2 690 const_rtx reg, unsigned int offset)
701e46d0 691{
2166bbaa 692 unsigned int isize = GET_MODE_SIZE (imode);
693 unsigned int osize = GET_MODE_SIZE (omode);
694
695 /* All subregs must be aligned. */
696 if (offset % osize != 0)
697 return false;
698
699 /* The subreg offset cannot be outside the inner object. */
700 if (offset >= isize)
701 return false;
702
703 /* ??? This should not be here. Temporarily continue to allow word_mode
704 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
705 Generally, backends are doing something sketchy but it'll take time to
706 fix them all. */
707 if (omode == word_mode)
708 ;
709 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
710 is the culprit here, and not the backends. */
711 else if (osize >= UNITS_PER_WORD && isize >= osize)
712 ;
713 /* Allow component subregs of complex and vector. Though given the below
714 extraction rules, it's not always clear what that means. */
715 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
716 && GET_MODE_INNER (imode) == omode)
717 ;
718 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
719 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
720 represent this. It's questionable if this ought to be represented at
721 all -- why can't this all be hidden in post-reload splitters that make
722 arbitrarily mode changes to the registers themselves. */
723 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
724 ;
725 /* Subregs involving floating point modes are not allowed to
726 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
727 (subreg:SI (reg:DF) 0) isn't. */
728 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
729 {
730 if (isize != osize)
731 return false;
732 }
701e46d0 733
2166bbaa 734 /* Paradoxical subregs must have offset zero. */
735 if (osize > isize)
736 return offset == 0;
737
738 /* This is a normal subreg. Verify that the offset is representable. */
739
740 /* For hard registers, we already have most of these rules collected in
741 subreg_offset_representable_p. */
742 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
743 {
744 unsigned int regno = REGNO (reg);
745
746#ifdef CANNOT_CHANGE_MODE_CLASS
747 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
748 && GET_MODE_INNER (imode) == omode)
749 ;
750 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
751 return false;
701e46d0 752#endif
2166bbaa 753
754 return subreg_offset_representable_p (regno, imode, offset, omode);
755 }
756
757 /* For pseudo registers, we want most of the same checks. Namely:
758 If the register no larger than a word, the subreg must be lowpart.
759 If the register is larger than a word, the subreg must be the lowpart
760 of a subword. A subreg does *not* perform arbitrary bit extraction.
761 Given that we've already checked mode/offset alignment, we only have
762 to check subword subregs here. */
763 if (osize < UNITS_PER_WORD)
764 {
765 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
766 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
767 if (offset % UNITS_PER_WORD != low_off)
768 return false;
769 }
770 return true;
771}
772
773rtx
774gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
775{
776 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
2ff23ed0 777 return gen_rtx_raw_SUBREG (mode, reg, offset);
701e46d0 778}
779
c6259b83 780/* Generate a SUBREG representing the least-significant part of REG if MODE
781 is smaller than mode of REG, otherwise paradoxical SUBREG. */
782
701e46d0 783rtx
35cb5232 784gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
701e46d0 785{
786 enum machine_mode inmode;
701e46d0 787
788 inmode = GET_MODE (reg);
789 if (inmode == VOIDmode)
790 inmode = mode;
81802af6 791 return gen_rtx_SUBREG (mode, reg,
792 subreg_lowpart_offset (mode, inmode));
701e46d0 793}
7014838c 794\f
15bbde2b 795
cf9ac040 796/* Create an rtvec and stores within it the RTXen passed in the arguments. */
797
15bbde2b 798rtvec
ee582a61 799gen_rtvec (int n, ...)
15bbde2b 800{
cf9ac040 801 int i;
802 rtvec rt_val;
ee582a61 803 va_list p;
15bbde2b 804
ee582a61 805 va_start (p, n);
15bbde2b 806
cf9ac040 807 /* Don't allocate an empty rtvec... */
15bbde2b 808 if (n == 0)
cf9ac040 809 return NULL_RTVEC;
15bbde2b 810
cf9ac040 811 rt_val = rtvec_alloc (n);
e5fcd76a 812
15bbde2b 813 for (i = 0; i < n; i++)
cf9ac040 814 rt_val->elem[i] = va_arg (p, rtx);
7ad77798 815
ee582a61 816 va_end (p);
cf9ac040 817 return rt_val;
15bbde2b 818}
819
820rtvec
35cb5232 821gen_rtvec_v (int n, rtx *argp)
15bbde2b 822{
19cb6b50 823 int i;
824 rtvec rt_val;
15bbde2b 825
cf9ac040 826 /* Don't allocate an empty rtvec... */
15bbde2b 827 if (n == 0)
cf9ac040 828 return NULL_RTVEC;
15bbde2b 829
cf9ac040 830 rt_val = rtvec_alloc (n);
15bbde2b 831
832 for (i = 0; i < n; i++)
a4070a91 833 rt_val->elem[i] = *argp++;
15bbde2b 834
835 return rt_val;
836}
837\f
80c70e76 838/* Return the number of bytes between the start of an OUTER_MODE
839 in-memory value and the start of an INNER_MODE in-memory value,
840 given that the former is a lowpart of the latter. It may be a
841 paradoxical lowpart, in which case the offset will be negative
842 on big-endian targets. */
843
844int
845byte_lowpart_offset (enum machine_mode outer_mode,
846 enum machine_mode inner_mode)
847{
848 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
849 return subreg_lowpart_offset (outer_mode, inner_mode);
850 else
851 return -subreg_lowpart_offset (inner_mode, outer_mode);
852}
853\f
15bbde2b 854/* Generate a REG rtx for a new pseudo register of mode MODE.
855 This pseudo is assigned the next sequential register number. */
856
857rtx
35cb5232 858gen_reg_rtx (enum machine_mode mode)
15bbde2b 859{
19cb6b50 860 rtx val;
27a7a23a 861 unsigned int align = GET_MODE_ALIGNMENT (mode);
15bbde2b 862
1b7ff857 863 gcc_assert (can_create_pseudo_p ());
15bbde2b 864
27a7a23a 865 /* If a virtual register with bigger mode alignment is generated,
866 increase stack alignment estimation because it might be spilled
867 to stack later. */
868 if (SUPPORTS_STACK_ALIGNMENT
869 && crtl->stack_alignment_estimated < align
870 && !crtl->stack_realign_processed)
8645d3e7 871 {
872 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
873 if (crtl->stack_alignment_estimated < min_align)
874 crtl->stack_alignment_estimated = min_align;
875 }
27a7a23a 876
316bc009 877 if (generating_concat_p
878 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
879 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 880 {
881 /* For complex modes, don't make a single pseudo.
882 Instead, make a CONCAT of two pseudos.
883 This allows noncontiguous allocation of the real and imaginary parts,
884 which makes much better code. Besides, allocating DCmode
885 pseudos overstrains reload on some machines like the 386. */
886 rtx realpart, imagpart;
e9e12845 887 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 888
889 realpart = gen_reg_rtx (partmode);
890 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 891 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 892 }
893
ca74b940 894 /* Make sure regno_pointer_align, and regno_reg_rtx are large
fcdc122e 895 enough to have an element for this pseudo reg number. */
15bbde2b 896
fd6ffb7c 897 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
15bbde2b 898 {
fd6ffb7c 899 int old_size = crtl->emit.regno_pointer_align_length;
9ce37fa7 900 char *tmp;
fcdc122e 901 rtx *new1;
fcdc122e 902
9ce37fa7 903 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
904 memset (tmp + old_size, 0, old_size);
905 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
0a893c29 906
2457c754 907 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
0a893c29 908 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 909 regno_reg_rtx = new1;
910
fd6ffb7c 911 crtl->emit.regno_pointer_align_length = old_size * 2;
15bbde2b 912 }
913
22cf44bc 914 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 915 regno_reg_rtx[reg_rtx_no++] = val;
916 return val;
917}
918
80c70e76 919/* Update NEW with the same attributes as REG, but with OFFSET added
920 to the REG_OFFSET. */
ca74b940 921
1a6a0f2a 922static void
9ce37fa7 923update_reg_offset (rtx new_rtx, rtx reg, int offset)
ca74b940 924{
9ce37fa7 925 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
35cb5232 926 REG_OFFSET (reg) + offset);
1a6a0f2a 927}
928
80c70e76 929/* Generate a register with same attributes as REG, but with OFFSET
930 added to the REG_OFFSET. */
1a6a0f2a 931
932rtx
933gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
934 int offset)
935{
9ce37fa7 936 rtx new_rtx = gen_rtx_REG (mode, regno);
1a6a0f2a 937
9ce37fa7 938 update_reg_offset (new_rtx, reg, offset);
939 return new_rtx;
1a6a0f2a 940}
941
942/* Generate a new pseudo-register with the same attributes as REG, but
80c70e76 943 with OFFSET added to the REG_OFFSET. */
1a6a0f2a 944
945rtx
946gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
947{
9ce37fa7 948 rtx new_rtx = gen_reg_rtx (mode);
1a6a0f2a 949
9ce37fa7 950 update_reg_offset (new_rtx, reg, offset);
951 return new_rtx;
ca74b940 952}
953
80c70e76 954/* Adjust REG in-place so that it has mode MODE. It is assumed that the
955 new register is a (possibly paradoxical) lowpart of the old one. */
ca74b940 956
957void
80c70e76 958adjust_reg_mode (rtx reg, enum machine_mode mode)
ca74b940 959{
80c70e76 960 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
961 PUT_MODE (reg, mode);
962}
963
964/* Copy REG's attributes from X, if X has any attributes. If REG and X
965 have different modes, REG is a (possibly paradoxical) lowpart of X. */
966
967void
968set_reg_attrs_from_value (rtx reg, rtx x)
969{
970 int offset;
971
ac56145e 972 /* Hard registers can be reused for multiple purposes within the same
973 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
974 on them is wrong. */
975 if (HARD_REGISTER_P (reg))
976 return;
977
80c70e76 978 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
ae12ddda 979 if (MEM_P (x))
980 {
971ba038 981 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
ae12ddda 982 REG_ATTRS (reg)
983 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
984 if (MEM_POINTER (x))
40b93dba 985 mark_reg_pointer (reg, 0);
ae12ddda 986 }
987 else if (REG_P (x))
988 {
989 if (REG_ATTRS (x))
990 update_reg_offset (reg, x, offset);
991 if (REG_POINTER (x))
992 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
993 }
994}
995
996/* Generate a REG rtx for a new pseudo register, copying the mode
997 and attributes from X. */
998
999rtx
1000gen_reg_rtx_and_attrs (rtx x)
1001{
1002 rtx reg = gen_reg_rtx (GET_MODE (x));
1003 set_reg_attrs_from_value (reg, x);
1004 return reg;
ca74b940 1005}
1006
263c416c 1007/* Set the register attributes for registers contained in PARM_RTX.
1008 Use needed values from memory attributes of MEM. */
1009
1010void
35cb5232 1011set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
263c416c 1012{
8ad4c111 1013 if (REG_P (parm_rtx))
80c70e76 1014 set_reg_attrs_from_value (parm_rtx, mem);
263c416c 1015 else if (GET_CODE (parm_rtx) == PARALLEL)
1016 {
1017 /* Check for a NULL entry in the first slot, used to indicate that the
1018 parameter goes both on the stack and in registers. */
1019 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1020 for (; i < XVECLEN (parm_rtx, 0); i++)
1021 {
1022 rtx x = XVECEXP (parm_rtx, 0, i);
8ad4c111 1023 if (REG_P (XEXP (x, 0)))
263c416c 1024 REG_ATTRS (XEXP (x, 0))
1025 = get_reg_attrs (MEM_EXPR (mem),
1026 INTVAL (XEXP (x, 1)));
1027 }
1028 }
1029}
1030
80c70e76 1031/* Set the REG_ATTRS for registers in value X, given that X represents
1032 decl T. */
ca74b940 1033
a8dd994c 1034void
80c70e76 1035set_reg_attrs_for_decl_rtl (tree t, rtx x)
1036{
1037 if (GET_CODE (x) == SUBREG)
ebfc27f5 1038 {
80c70e76 1039 gcc_assert (subreg_lowpart_p (x));
1040 x = SUBREG_REG (x);
ebfc27f5 1041 }
8ad4c111 1042 if (REG_P (x))
80c70e76 1043 REG_ATTRS (x)
1044 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
3ff99faa 1045 DECL_MODE (t)));
ca74b940 1046 if (GET_CODE (x) == CONCAT)
1047 {
1048 if (REG_P (XEXP (x, 0)))
1049 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1050 if (REG_P (XEXP (x, 1)))
1051 REG_ATTRS (XEXP (x, 1))
1052 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1053 }
1054 if (GET_CODE (x) == PARALLEL)
1055 {
85d25060 1056 int i, start;
1057
1058 /* Check for a NULL entry, used to indicate that the parameter goes
1059 both on the stack and in registers. */
1060 if (XEXP (XVECEXP (x, 0, 0), 0))
1061 start = 0;
1062 else
1063 start = 1;
1064
1065 for (i = start; i < XVECLEN (x, 0); i++)
ca74b940 1066 {
1067 rtx y = XVECEXP (x, 0, i);
1068 if (REG_P (XEXP (y, 0)))
1069 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1070 }
1071 }
1072}
1073
80c70e76 1074/* Assign the RTX X to declaration T. */
1075
1076void
1077set_decl_rtl (tree t, rtx x)
1078{
1079 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1080 if (x)
1081 set_reg_attrs_for_decl_rtl (t, x);
1082}
1083
d91cf567 1084/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1085 if the ABI requires the parameter to be passed by reference. */
80c70e76 1086
1087void
d91cf567 1088set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
80c70e76 1089{
1090 DECL_INCOMING_RTL (t) = x;
d91cf567 1091 if (x && !by_reference_p)
80c70e76 1092 set_reg_attrs_for_decl_rtl (t, x);
1093}
1094
de8ecfb5 1095/* Identify REG (which may be a CONCAT) as a user register. */
1096
1097void
35cb5232 1098mark_user_reg (rtx reg)
de8ecfb5 1099{
1100 if (GET_CODE (reg) == CONCAT)
1101 {
1102 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1103 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1104 }
de8ecfb5 1105 else
611234b4 1106 {
1107 gcc_assert (REG_P (reg));
1108 REG_USERVAR_P (reg) = 1;
1109 }
de8ecfb5 1110}
1111
d4c332ff 1112/* Identify REG as a probable pointer register and show its alignment
1113 as ALIGN, if nonzero. */
15bbde2b 1114
1115void
35cb5232 1116mark_reg_pointer (rtx reg, int align)
15bbde2b 1117{
e61a0a7f 1118 if (! REG_POINTER (reg))
612409a6 1119 {
e61a0a7f 1120 REG_POINTER (reg) = 1;
d4c332ff 1121
612409a6 1122 if (align)
1123 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1124 }
1125 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
8b332087 1126 /* We can no-longer be sure just how aligned this pointer is. */
d4c332ff 1127 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 1128}
1129
1130/* Return 1 plus largest pseudo reg number used in the current function. */
1131
1132int
35cb5232 1133max_reg_num (void)
15bbde2b 1134{
1135 return reg_rtx_no;
1136}
1137
1138/* Return 1 + the largest label number used so far in the current function. */
1139
1140int
35cb5232 1141max_label_num (void)
15bbde2b 1142{
15bbde2b 1143 return label_num;
1144}
1145
1146/* Return first label number used in this function (if any were used). */
1147
1148int
35cb5232 1149get_first_label_num (void)
15bbde2b 1150{
1151 return first_label_num;
1152}
4ee9c684 1153
1154/* If the rtx for label was created during the expansion of a nested
1155 function, then first_label_num won't include this label number.
f0b5f617 1156 Fix this now so that array indices work later. */
4ee9c684 1157
1158void
1159maybe_set_first_label_num (rtx x)
1160{
1161 if (CODE_LABEL_NUMBER (x) < first_label_num)
1162 first_label_num = CODE_LABEL_NUMBER (x);
1163}
15bbde2b 1164\f
1165/* Return a value representing some low-order bits of X, where the number
1166 of low-order bits is given by MODE. Note that no conversion is done
d823ba47 1167 between floating-point and fixed-point values, rather, the bit
15bbde2b 1168 representation is returned.
1169
1170 This function handles the cases in common between gen_lowpart, below,
1171 and two variants in cse.c and combine.c. These are the cases that can
1172 be safely handled at all points in the compilation.
1173
1174 If this is not a case we can handle, return 0. */
1175
1176rtx
35cb5232 1177gen_lowpart_common (enum machine_mode mode, rtx x)
15bbde2b 1178{
701e46d0 1179 int msize = GET_MODE_SIZE (mode);
791172c5 1180 int xsize;
701e46d0 1181 int offset = 0;
791172c5 1182 enum machine_mode innermode;
1183
1184 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1185 so we have to make one up. Yuk. */
1186 innermode = GET_MODE (x);
971ba038 1187 if (CONST_INT_P (x)
6c799a83 1188 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
791172c5 1189 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1190 else if (innermode == VOIDmode)
1191 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1192
1193 xsize = GET_MODE_SIZE (innermode);
1194
611234b4 1195 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
15bbde2b 1196
791172c5 1197 if (innermode == mode)
15bbde2b 1198 return x;
1199
1200 /* MODE must occupy no more words than the mode of X. */
791172c5 1201 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1202 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
15bbde2b 1203 return 0;
1204
9abe1e73 1205 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
cee7491d 1206 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
9abe1e73 1207 return 0;
1208
791172c5 1209 offset = subreg_lowpart_offset (mode, innermode);
15bbde2b 1210
1211 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
64115b39 1212 && (GET_MODE_CLASS (mode) == MODE_INT
1213 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
15bbde2b 1214 {
1215 /* If we are getting the low-order part of something that has been
1216 sign- or zero-extended, we can either just use the object being
1217 extended or make a narrower extension. If we want an even smaller
1218 piece than the size of the object being extended, call ourselves
1219 recursively.
1220
1221 This case is used mostly by combine and cse. */
1222
1223 if (GET_MODE (XEXP (x, 0)) == mode)
1224 return XEXP (x, 0);
791172c5 1225 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
15bbde2b 1226 return gen_lowpart_common (mode, XEXP (x, 0));
791172c5 1227 else if (msize < xsize)
3ad7bb1c 1228 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
15bbde2b 1229 }
8ad4c111 1230 else if (GET_CODE (x) == SUBREG || REG_P (x)
791172c5 1231 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
971ba038 1232 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
791172c5 1233 return simplify_gen_subreg (mode, x, innermode, offset);
4a307dd5 1234
15bbde2b 1235 /* Otherwise, we can't do this. */
1236 return 0;
1237}
1238\f
d56d0ca2 1239rtx
35cb5232 1240gen_highpart (enum machine_mode mode, rtx x)
d56d0ca2 1241{
701e46d0 1242 unsigned int msize = GET_MODE_SIZE (mode);
81802af6 1243 rtx result;
701e46d0 1244
d56d0ca2 1245 /* This case loses if X is a subreg. To catch bugs early,
1246 complain if an invalid MODE is used even in other cases. */
611234b4 1247 gcc_assert (msize <= UNITS_PER_WORD
1248 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
701e46d0 1249
81802af6 1250 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1251 subreg_highpart_offset (mode, GET_MODE (x)));
611234b4 1252 gcc_assert (result);
1253
a8c36ab2 1254 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1255 the target if we have a MEM. gen_highpart must return a valid operand,
1256 emitting code if necessary to do so. */
611234b4 1257 if (MEM_P (result))
1258 {
1259 result = validize_mem (result);
1260 gcc_assert (result);
1261 }
1262
81802af6 1263 return result;
1264}
704fcf2b 1265
29d56731 1266/* Like gen_highpart, but accept mode of EXP operand in case EXP can
704fcf2b 1267 be VOIDmode constant. */
1268rtx
35cb5232 1269gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
704fcf2b 1270{
1271 if (GET_MODE (exp) != VOIDmode)
1272 {
611234b4 1273 gcc_assert (GET_MODE (exp) == innermode);
704fcf2b 1274 return gen_highpart (outermode, exp);
1275 }
1276 return simplify_gen_subreg (outermode, exp, innermode,
1277 subreg_highpart_offset (outermode, innermode));
1278}
d4c5e26d 1279
80c70e76 1280/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
10ef59ac 1281
81802af6 1282unsigned int
35cb5232 1283subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
81802af6 1284{
1285 unsigned int offset = 0;
1286 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1287
81802af6 1288 if (difference > 0)
d56d0ca2 1289 {
81802af6 1290 if (WORDS_BIG_ENDIAN)
1291 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1292 if (BYTES_BIG_ENDIAN)
1293 offset += difference % UNITS_PER_WORD;
d56d0ca2 1294 }
701e46d0 1295
81802af6 1296 return offset;
d56d0ca2 1297}
64ab453f 1298
81802af6 1299/* Return offset in bytes to get OUTERMODE high part
1300 of the value in mode INNERMODE stored in memory in target format. */
1301unsigned int
35cb5232 1302subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
64ab453f 1303{
1304 unsigned int offset = 0;
1305 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1306
611234b4 1307 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
81802af6 1308
64ab453f 1309 if (difference > 0)
1310 {
81802af6 1311 if (! WORDS_BIG_ENDIAN)
64ab453f 1312 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1313 if (! BYTES_BIG_ENDIAN)
64ab453f 1314 offset += difference % UNITS_PER_WORD;
1315 }
1316
81802af6 1317 return offset;
64ab453f 1318}
d56d0ca2 1319
15bbde2b 1320/* Return 1 iff X, assumed to be a SUBREG,
1321 refers to the least significant part of its containing reg.
1322 If X is not a SUBREG, always return 1 (it is its own low part!). */
1323
1324int
b7bf20db 1325subreg_lowpart_p (const_rtx x)
15bbde2b 1326{
1327 if (GET_CODE (x) != SUBREG)
1328 return 1;
7e14c1bf 1329 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1330 return 0;
15bbde2b 1331
81802af6 1332 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1333 == SUBREG_BYTE (x));
15bbde2b 1334}
1335\f
701e46d0 1336/* Return subword OFFSET of operand OP.
1337 The word number, OFFSET, is interpreted as the word number starting
1338 at the low-order address. OFFSET 0 is the low-order word if not
1339 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1340
1341 If we cannot extract the required word, we return zero. Otherwise,
1342 an rtx corresponding to the requested word will be returned.
1343
1344 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1345 reload has completed, a valid address will always be returned. After
1346 reload, if a valid address cannot be returned, we return zero.
1347
1348 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1349 it is the responsibility of the caller.
1350
1351 MODE is the mode of OP in case it is a CONST_INT.
1352
1353 ??? This is still rather broken for some cases. The problem for the
1354 moment is that all callers of this thing provide no 'goal mode' to
1355 tell us to work with. This exists because all callers were written
84e81e84 1356 in a word based SUBREG world.
1357 Now use of this function can be deprecated by simplify_subreg in most
1358 cases.
1359 */
701e46d0 1360
1361rtx
35cb5232 1362operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
701e46d0 1363{
1364 if (mode == VOIDmode)
1365 mode = GET_MODE (op);
1366
611234b4 1367 gcc_assert (mode != VOIDmode);
701e46d0 1368
6312a35e 1369 /* If OP is narrower than a word, fail. */
701e46d0 1370 if (mode != BLKmode
1371 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1372 return 0;
1373
6312a35e 1374 /* If we want a word outside OP, return zero. */
701e46d0 1375 if (mode != BLKmode
1376 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1377 return const0_rtx;
1378
701e46d0 1379 /* Form a new MEM at the requested address. */
e16ceb8e 1380 if (MEM_P (op))
701e46d0 1381 {
9ce37fa7 1382 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
701e46d0 1383
e4e86ec5 1384 if (! validate_address)
9ce37fa7 1385 return new_rtx;
e4e86ec5 1386
1387 else if (reload_completed)
701e46d0 1388 {
bd1a81f7 1389 if (! strict_memory_address_addr_space_p (word_mode,
1390 XEXP (new_rtx, 0),
1391 MEM_ADDR_SPACE (op)))
e4e86ec5 1392 return 0;
701e46d0 1393 }
e4e86ec5 1394 else
9ce37fa7 1395 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
701e46d0 1396 }
1397
84e81e84 1398 /* Rest can be handled by simplify_subreg. */
1399 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
701e46d0 1400}
1401
89f18f73 1402/* Similar to `operand_subword', but never return 0. If we can't
1403 extract the required subword, put OP into a register and try again.
1404 The second attempt must succeed. We always validate the address in
1405 this case.
15bbde2b 1406
1407 MODE is the mode of OP, in case it is CONST_INT. */
1408
1409rtx
35cb5232 1410operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
15bbde2b 1411{
701e46d0 1412 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1413
1414 if (result)
1415 return result;
1416
1417 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1418 {
1419 /* If this is a register which can not be accessed by words, copy it
1420 to a pseudo register. */
8ad4c111 1421 if (REG_P (op))
ac825d29 1422 op = copy_to_reg (op);
1423 else
1424 op = force_reg (mode, op);
1425 }
15bbde2b 1426
701e46d0 1427 result = operand_subword (op, offset, 1, mode);
611234b4 1428 gcc_assert (result);
15bbde2b 1429
1430 return result;
1431}
1432\f
b3ff8d90 1433/* Returns 1 if both MEM_EXPR can be considered equal
1434 and 0 otherwise. */
1435
1436int
52d07779 1437mem_expr_equal_p (const_tree expr1, const_tree expr2)
b3ff8d90 1438{
1439 if (expr1 == expr2)
1440 return 1;
1441
1442 if (! expr1 || ! expr2)
1443 return 0;
1444
1445 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1446 return 0;
1447
3a443843 1448 return operand_equal_p (expr1, expr2, 0);
b3ff8d90 1449}
1450
ad0a178f 1451/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1452 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1453 -1 if not known. */
1454
1455int
7cfdc2f0 1456get_mem_align_offset (rtx mem, unsigned int align)
ad0a178f 1457{
1458 tree expr;
1459 unsigned HOST_WIDE_INT offset;
1460
1461 /* This function can't use
1462 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1463 || !CONST_INT_P (MEM_OFFSET (mem))
1464 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
1465 < align))
1466 return -1;
1467 else
1468 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1469 for two reasons:
1470 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1471 for <variable>. get_inner_reference doesn't handle it and
1472 even if it did, the alignment in that case needs to be determined
1473 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1474 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1475 isn't sufficiently aligned, the object it is in might be. */
1476 gcc_assert (MEM_P (mem));
1477 expr = MEM_EXPR (mem);
1478 if (expr == NULL_TREE
1479 || MEM_OFFSET (mem) == NULL_RTX
1480 || !CONST_INT_P (MEM_OFFSET (mem)))
1481 return -1;
1482
1483 offset = INTVAL (MEM_OFFSET (mem));
1484 if (DECL_P (expr))
1485 {
1486 if (DECL_ALIGN (expr) < align)
1487 return -1;
1488 }
1489 else if (INDIRECT_REF_P (expr))
1490 {
1491 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1492 return -1;
1493 }
1494 else if (TREE_CODE (expr) == COMPONENT_REF)
1495 {
1496 while (1)
1497 {
1498 tree inner = TREE_OPERAND (expr, 0);
1499 tree field = TREE_OPERAND (expr, 1);
1500 tree byte_offset = component_ref_field_offset (expr);
1501 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1502
1503 if (!byte_offset
1504 || !host_integerp (byte_offset, 1)
1505 || !host_integerp (bit_offset, 1))
1506 return -1;
1507
1508 offset += tree_low_cst (byte_offset, 1);
1509 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1510
1511 if (inner == NULL_TREE)
1512 {
1513 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1514 < (unsigned int) align)
1515 return -1;
1516 break;
1517 }
1518 else if (DECL_P (inner))
1519 {
1520 if (DECL_ALIGN (inner) < align)
1521 return -1;
1522 break;
1523 }
1524 else if (TREE_CODE (inner) != COMPONENT_REF)
1525 return -1;
1526 expr = inner;
1527 }
1528 }
1529 else
1530 return -1;
1531
1532 return offset & ((align / BITS_PER_UNIT) - 1);
1533}
1534
310b57a1 1535/* Given REF (a MEM) and T, either the type of X or the expression
c6259b83 1536 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1537 if we are making a new object of this type. BITPOS is nonzero if
1538 there is an offset outstanding on T that will be applied later. */
c6259b83 1539
1540void
35cb5232 1541set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1542 HOST_WIDE_INT bitpos)
c6259b83 1543{
32c2fdea 1544 alias_set_type alias = MEM_ALIAS_SET (ref);
b10dbbca 1545 tree expr = MEM_EXPR (ref);
2a631e19 1546 rtx offset = MEM_OFFSET (ref);
1547 rtx size = MEM_SIZE (ref);
1548 unsigned int align = MEM_ALIGN (ref);
6f717f77 1549 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1550 tree type;
1551
1552 /* It can happen that type_for_mode was given a mode for which there
1553 is no language-level type. In which case it returns NULL, which
1554 we can see here. */
1555 if (t == NULL_TREE)
1556 return;
1557
1558 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1559 if (type == error_mark_node)
1560 return;
c6259b83 1561
c6259b83 1562 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1563 wrong answer, as it assumes that DECL_RTL already has the right alias
1564 info. Callers should not set DECL_RTL until after the call to
1565 set_mem_attributes. */
611234b4 1566 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1567
96216d37 1568 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1569 front-end routine) and use it. */
1570 alias = get_alias_set (t);
c6259b83 1571
fbc6244b 1572 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
cfefc966 1573 MEM_IN_STRUCT_P (ref)
1574 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
8d350e69 1575 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1576
2a631e19 1577 /* If we are making an object of this type, or if this is a DECL, we know
1578 that it is a scalar if the type is not an aggregate. */
cfefc966 1579 if ((objectp || DECL_P (t))
1580 && ! AGGREGATE_TYPE_P (type)
1581 && TREE_CODE (type) != COMPLEX_TYPE)
c6259b83 1582 MEM_SCALAR_P (ref) = 1;
1583
a9d9ab08 1584 /* We can set the alignment from the type if we are making an object,
1585 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
b056d812 1586 if (objectp || TREE_CODE (t) == INDIRECT_REF
1587 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1588 || TYPE_ALIGN_OK (type))
a9d9ab08 1589 align = MAX (align, TYPE_ALIGN (type));
b056d812 1590 else
1591 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1592 {
1593 if (integer_zerop (TREE_OPERAND (t, 1)))
1594 /* We don't know anything about the alignment. */
1595 align = BITS_PER_UNIT;
1596 else
1597 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1598 }
ecfe4ca9 1599
96216d37 1600 /* If the size is known, we can set that. */
1601 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
2a631e19 1602 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
96216d37 1603
579bccf9 1604 /* If T is not a type, we may be able to deduce some more information about
1605 the expression. */
1606 if (! TYPE_P (t))
2a631e19 1607 {
ae2dd339 1608 tree base;
698537d1 1609 bool align_computed = false;
b04fab2a 1610
2a631e19 1611 if (TREE_THIS_VOLATILE (t))
1612 MEM_VOLATILE_P (ref) = 1;
c6259b83 1613
3c00f11c 1614 /* Now remove any conversions: they don't change what the underlying
1615 object is. Likewise for SAVE_EXPR. */
72dd6141 1616 while (CONVERT_EXPR_P (t)
3c00f11c 1617 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1618 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1619 t = TREE_OPERAND (t, 0);
1620
ae2dd339 1621 /* We may look through structure-like accesses for the purposes of
1622 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1623 base = t;
1624 while (TREE_CODE (base) == COMPONENT_REF
1625 || TREE_CODE (base) == REALPART_EXPR
1626 || TREE_CODE (base) == IMAGPART_EXPR
1627 || TREE_CODE (base) == BIT_FIELD_REF)
1628 base = TREE_OPERAND (base, 0);
1629
1630 if (DECL_P (base))
1631 {
1632 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1633 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1634 else
1635 MEM_NOTRAP_P (ref) = 1;
1636 }
1637 else
1638 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1639
1640 base = get_base_address (base);
1641 if (base && DECL_P (base)
1642 && TREE_READONLY (base)
1643 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1644 {
1645 tree base_type = TREE_TYPE (base);
1646 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1647 || DECL_ARTIFICIAL (base));
1648 MEM_READONLY_P (ref) = 1;
1649 }
1650
2b02580f 1651 /* If this expression uses it's parent's alias set, mark it such
1652 that we won't change it. */
1653 if (component_uses_parent_alias_set (t))
5cc193e7 1654 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1655
2a631e19 1656 /* If this is a decl, set the attributes of the MEM from it. */
1657 if (DECL_P (t))
1658 {
b10dbbca 1659 expr = t;
1660 offset = const0_rtx;
6f717f77 1661 apply_bitpos = bitpos;
2a631e19 1662 size = (DECL_SIZE_UNIT (t)
1663 && host_integerp (DECL_SIZE_UNIT (t), 1)
1664 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
d4c5e26d 1665 align = DECL_ALIGN (t);
698537d1 1666 align_computed = true;
2a631e19 1667 }
1668
ecfe4ca9 1669 /* If this is a constant, we know the alignment. */
ce45a448 1670 else if (CONSTANT_CLASS_P (t))
42f6f447 1671 {
1672 align = TYPE_ALIGN (type);
1673#ifdef CONSTANT_ALIGNMENT
1674 align = CONSTANT_ALIGNMENT (t, align);
1675#endif
698537d1 1676 align_computed = true;
42f6f447 1677 }
b10dbbca 1678
1679 /* If this is a field reference and not a bit-field, record it. */
f0b5f617 1680 /* ??? There is some information that can be gleaned from bit-fields,
b10dbbca 1681 such as the word offset in the structure that might be modified.
1682 But skip it for now. */
1683 else if (TREE_CODE (t) == COMPONENT_REF
1684 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1685 {
3a443843 1686 expr = t;
b10dbbca 1687 offset = const0_rtx;
6f717f77 1688 apply_bitpos = bitpos;
b10dbbca 1689 /* ??? Any reason the field size would be different than
1690 the size we got from the type? */
1691 }
1692
1693 /* If this is an array reference, look for an outer field reference. */
1694 else if (TREE_CODE (t) == ARRAY_REF)
1695 {
1696 tree off_tree = size_zero_node;
6b039979 1697 /* We can't modify t, because we use it at the end of the
1698 function. */
1699 tree t2 = t;
b10dbbca 1700
1701 do
1702 {
6b039979 1703 tree index = TREE_OPERAND (t2, 1);
6374121b 1704 tree low_bound = array_ref_low_bound (t2);
1705 tree unit_size = array_ref_element_size (t2);
97f8ce30 1706
1707 /* We assume all arrays have sizes that are a multiple of a byte.
1708 First subtract the lower bound, if any, in the type of the
6374121b 1709 index, then convert to sizetype and multiply by the size of
1710 the array element. */
1711 if (! integer_zerop (low_bound))
faa43f85 1712 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1713 index, low_bound);
97f8ce30 1714
6374121b 1715 off_tree = size_binop (PLUS_EXPR,
535664e3 1716 size_binop (MULT_EXPR,
1717 fold_convert (sizetype,
1718 index),
6374121b 1719 unit_size),
1720 off_tree);
6b039979 1721 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1722 }
6b039979 1723 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1724
6b039979 1725 if (DECL_P (t2))
2d8fe5d0 1726 {
6b039979 1727 expr = t2;
0318dc09 1728 offset = NULL;
2d8fe5d0 1729 if (host_integerp (off_tree, 1))
0318dc09 1730 {
1731 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1732 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
6b039979 1733 align = DECL_ALIGN (t2);
3473aefe 1734 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
0318dc09 1735 align = aoff;
698537d1 1736 align_computed = true;
0318dc09 1737 offset = GEN_INT (ioff);
6f717f77 1738 apply_bitpos = bitpos;
0318dc09 1739 }
2d8fe5d0 1740 }
6b039979 1741 else if (TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1742 {
3a443843 1743 expr = t2;
1744 offset = NULL;
b10dbbca 1745 if (host_integerp (off_tree, 1))
6f717f77 1746 {
1747 offset = GEN_INT (tree_low_cst (off_tree, 1));
1748 apply_bitpos = bitpos;
1749 }
b10dbbca 1750 /* ??? Any reason the field size would be different than
1751 the size we got from the type? */
1752 }
2d8fe5d0 1753 else if (flag_argument_noalias > 1
2a448a75 1754 && (INDIRECT_REF_P (t2))
6b039979 1755 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
2d8fe5d0 1756 {
6b039979 1757 expr = t2;
2d8fe5d0 1758 offset = NULL;
1759 }
1760 }
1761
1762 /* If this is a Fortran indirect argument reference, record the
1763 parameter decl. */
1764 else if (flag_argument_noalias > 1
2a448a75 1765 && (INDIRECT_REF_P (t))
2d8fe5d0 1766 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1767 {
1768 expr = t;
1769 offset = NULL;
b10dbbca 1770 }
698537d1 1771
1772 if (!align_computed && !INDIRECT_REF_P (t))
1773 {
1774 unsigned int obj_align
1775 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1776 align = MAX (align, obj_align);
1777 }
2a631e19 1778 }
1779
e2e205b3 1780 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1781 bit position offset. Similarly, increase the size of the accessed
1782 object to contain the negative offset. */
6f717f77 1783 if (apply_bitpos)
595f1461 1784 {
1785 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1786 if (size)
1787 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1788 }
6f717f77 1789
b056d812 1790 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1791 {
f0b5f617 1792 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
b056d812 1793 we're overlapping. */
1794 offset = NULL;
1795 expr = NULL;
1796 }
1797
2a631e19 1798 /* Now set the attributes we computed above. */
5cc193e7 1799 MEM_ATTRS (ref)
bd1a81f7 1800 = get_mem_attrs (alias, expr, offset, size, align,
1801 TYPE_ADDR_SPACE (type), GET_MODE (ref));
2a631e19 1802
1803 /* If this is already known to be a scalar or aggregate, we are done. */
1804 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
96216d37 1805 return;
1806
2a631e19 1807 /* If it is a reference into an aggregate, this is part of an aggregate.
1808 Otherwise we don't know. */
c6259b83 1809 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1810 || TREE_CODE (t) == ARRAY_RANGE_REF
1811 || TREE_CODE (t) == BIT_FIELD_REF)
1812 MEM_IN_STRUCT_P (ref) = 1;
1813}
1814
6f717f77 1815void
35cb5232 1816set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1817{
1818 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1819}
1820
c6259b83 1821/* Set the alias set of MEM to SET. */
1822
1823void
32c2fdea 1824set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1825{
d4c5e26d 1826#ifdef ENABLE_CHECKING
c6259b83 1827 /* If the new and old alias sets don't conflict, something is wrong. */
611234b4 1828 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
c6259b83 1829#endif
1830
b10dbbca 1831 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
5cc193e7 1832 MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 1833 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1834}
1835
1836/* Set the address space of MEM to ADDRSPACE (target-defined). */
1837
1838void
1839set_mem_addr_space (rtx mem, addr_space_t addrspace)
1840{
1841 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1842 MEM_OFFSET (mem), MEM_SIZE (mem),
1843 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
c6259b83 1844}
96216d37 1845
1c4512da 1846/* Set the alignment of MEM to ALIGN bits. */
96216d37 1847
1848void
35cb5232 1849set_mem_align (rtx mem, unsigned int align)
96216d37 1850{
b10dbbca 1851 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
5cc193e7 1852 MEM_OFFSET (mem), MEM_SIZE (mem), align,
bd1a81f7 1853 MEM_ADDR_SPACE (mem), GET_MODE (mem));
96216d37 1854}
278fe152 1855
b10dbbca 1856/* Set the expr for MEM to EXPR. */
278fe152 1857
1858void
35cb5232 1859set_mem_expr (rtx mem, tree expr)
278fe152 1860{
1861 MEM_ATTRS (mem)
b10dbbca 1862 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
bd1a81f7 1863 MEM_SIZE (mem), MEM_ALIGN (mem),
1864 MEM_ADDR_SPACE (mem), GET_MODE (mem));
278fe152 1865}
b10dbbca 1866
1867/* Set the offset of MEM to OFFSET. */
1868
1869void
35cb5232 1870set_mem_offset (rtx mem, rtx offset)
b10dbbca 1871{
1872 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1873 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 1874 MEM_ADDR_SPACE (mem), GET_MODE (mem));
f0500469 1875}
1876
1877/* Set the size of MEM to SIZE. */
1878
1879void
35cb5232 1880set_mem_size (rtx mem, rtx size)
f0500469 1881{
1882 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1883 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
bd1a81f7 1884 MEM_ADDR_SPACE (mem), GET_MODE (mem));
b10dbbca 1885}
c6259b83 1886\f
96216d37 1887/* Return a memory reference like MEMREF, but with its mode changed to MODE
1888 and its address changed to ADDR. (VOIDmode means don't change the mode.
1889 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1890 returned memory location is required to be valid. The memory
1891 attributes are not changed. */
15bbde2b 1892
96216d37 1893static rtx
35cb5232 1894change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 1895{
bd1a81f7 1896 addr_space_t as;
9ce37fa7 1897 rtx new_rtx;
15bbde2b 1898
611234b4 1899 gcc_assert (MEM_P (memref));
bd1a81f7 1900 as = MEM_ADDR_SPACE (memref);
15bbde2b 1901 if (mode == VOIDmode)
1902 mode = GET_MODE (memref);
1903 if (addr == 0)
1904 addr = XEXP (memref, 0);
3988ef8b 1905 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 1906 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 1907 return memref;
15bbde2b 1908
e4e86ec5 1909 if (validate)
15bbde2b 1910 {
e4e86ec5 1911 if (reload_in_progress || reload_completed)
bd1a81f7 1912 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 1913 else
bd1a81f7 1914 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 1915 }
d823ba47 1916
e8976cd7 1917 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1918 return memref;
1919
9ce37fa7 1920 new_rtx = gen_rtx_MEM (mode, addr);
1921 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1922 return new_rtx;
15bbde2b 1923}
537ffcfc 1924
96216d37 1925/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1926 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 1927
1928rtx
35cb5232 1929change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 1930{
9ce37fa7 1931 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1932 enum machine_mode mmode = GET_MODE (new_rtx);
0ab96142 1933 unsigned int align;
1934
1935 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1936 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
6cc60c4d 1937
d28edf0d 1938 /* If there are no changes, just return the original memory reference. */
9ce37fa7 1939 if (new_rtx == memref)
0ab96142 1940 {
1941 if (MEM_ATTRS (memref) == 0
1942 || (MEM_EXPR (memref) == NULL
1943 && MEM_OFFSET (memref) == NULL
1944 && MEM_SIZE (memref) == size
1945 && MEM_ALIGN (memref) == align))
9ce37fa7 1946 return new_rtx;
0ab96142 1947
9ce37fa7 1948 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1949 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 1950 }
d28edf0d 1951
9ce37fa7 1952 MEM_ATTRS (new_rtx)
bd1a81f7 1953 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1954 MEM_ADDR_SPACE (memref), mmode);
fb257ae6 1955
9ce37fa7 1956 return new_rtx;
e513d163 1957}
537ffcfc 1958
96216d37 1959/* Return a memory reference like MEMREF, but with its mode changed
1960 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 1961 nonzero, the memory address is forced to be valid.
1962 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1963 and caller is responsible for adjusting MEMREF base register. */
e4e86ec5 1964
1965rtx
35cb5232 1966adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1967 int validate, int adjust)
e4e86ec5 1968{
fb257ae6 1969 rtx addr = XEXP (memref, 0);
9ce37fa7 1970 rtx new_rtx;
96216d37 1971 rtx memoffset = MEM_OFFSET (memref);
5cc193e7 1972 rtx size = 0;
96216d37 1973 unsigned int memalign = MEM_ALIGN (memref);
bd1a81f7 1974 addr_space_t as = MEM_ADDR_SPACE (memref);
98155838 1975 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
cfb75cdf 1976 int pbits;
fb257ae6 1977
d28edf0d 1978 /* If there are no changes, just return the original memory reference. */
1979 if (mode == GET_MODE (memref) && !offset
bd1a81f7 1980 && (!validate || memory_address_addr_space_p (mode, addr, as)))
d28edf0d 1981 return memref;
1982
e36c3d58 1983 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 1984 This may happen even if offset is nonzero -- consider
e36c3d58 1985 (plus (plus reg reg) const_int) -- so do this always. */
1986 addr = copy_rtx (addr);
1987
cfb75cdf 1988 /* Convert a possibly large offset to a signed value within the
1989 range of the target address space. */
98155838 1990 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 1991 if (HOST_BITS_PER_WIDE_INT > pbits)
1992 {
1993 int shift = HOST_BITS_PER_WIDE_INT - pbits;
1994 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
1995 >> shift);
1996 }
1997
cd358719 1998 if (adjust)
1999 {
2000 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2001 object, we can merge it into the LO_SUM. */
2002 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2003 && offset >= 0
2004 && (unsigned HOST_WIDE_INT) offset
2005 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2006 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
cd358719 2007 plus_constant (XEXP (addr, 1), offset));
2008 else
2009 addr = plus_constant (addr, offset);
2010 }
fb257ae6 2011
9ce37fa7 2012 new_rtx = change_address_1 (memref, mode, addr, validate);
96216d37 2013
e077413c 2014 /* If the address is a REG, change_address_1 rightfully returns memref,
2015 but this would destroy memref's MEM_ATTRS. */
2016 if (new_rtx == memref && offset != 0)
2017 new_rtx = copy_rtx (new_rtx);
2018
96216d37 2019 /* Compute the new values of the memory attributes due to this adjustment.
2020 We add the offsets and update the alignment. */
2021 if (memoffset)
2022 memoffset = GEN_INT (offset + INTVAL (memoffset));
2023
b8098e5b 2024 /* Compute the new alignment by taking the MIN of the alignment and the
2025 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2026 if zero. */
2027 if (offset != 0)
f4aee538 2028 memalign
2029 = MIN (memalign,
2030 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
96216d37 2031
5cc193e7 2032 /* We can compute the size in a number of ways. */
9ce37fa7 2033 if (GET_MODE (new_rtx) != BLKmode)
2034 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
5cc193e7 2035 else if (MEM_SIZE (memref))
2036 size = plus_constant (MEM_SIZE (memref), -offset);
2037
9ce37fa7 2038 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
bd1a81f7 2039 memoffset, size, memalign, as,
2040 GET_MODE (new_rtx));
96216d37 2041
2042 /* At some point, we should validate that this offset is within the object,
2043 if all the appropriate values are known. */
9ce37fa7 2044 return new_rtx;
e4e86ec5 2045}
2046
bf42c62d 2047/* Return a memory reference like MEMREF, but with its mode changed
2048 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2049 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2050 nonzero, the memory address is forced to be valid. */
2051
2052rtx
35cb5232 2053adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2054 HOST_WIDE_INT offset, int validate)
bf42c62d 2055{
2056 memref = change_address_1 (memref, VOIDmode, addr, validate);
2057 return adjust_address_1 (memref, mode, offset, validate, 0);
2058}
2059
2a631e19 2060/* Return a memory reference like MEMREF, but whose address is changed by
2061 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2062 known to be in OFFSET (possibly 1). */
fcdc122e 2063
2064rtx
35cb5232 2065offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2066{
9ce37fa7 2067 rtx new_rtx, addr = XEXP (memref, 0);
bd1a81f7 2068 addr_space_t as = MEM_ADDR_SPACE (memref);
98155838 2069 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
fac6aae6 2070
98155838 2071 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2072
d4c5e26d 2073 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2074 could have secondary memory references, multiplies or anything.
fac6aae6 2075
2076 However, if we did go and rearrange things, we can wind up not
2077 being able to recognize the magic around pic_offset_table_rtx.
2078 This stuff is fragile, and is yet another example of why it is
2079 bad to expose PIC machinery too early. */
bd1a81f7 2080 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
fac6aae6 2081 && GET_CODE (addr) == PLUS
2082 && XEXP (addr, 0) == pic_offset_table_rtx)
2083 {
2084 addr = force_reg (GET_MODE (addr), addr);
98155838 2085 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2086 }
2087
9ce37fa7 2088 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2089 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
fcdc122e 2090
d28edf0d 2091 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2092 if (new_rtx == memref)
2093 return new_rtx;
d28edf0d 2094
fcdc122e 2095 /* Update the alignment to reflect the offset. Reset the offset, which
2096 we don't know. */
9ce37fa7 2097 MEM_ATTRS (new_rtx)
80fabb90 2098 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
84130727 2099 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
bd1a81f7 2100 as, GET_MODE (new_rtx));
9ce37fa7 2101 return new_rtx;
fcdc122e 2102}
d4c5e26d 2103
537ffcfc 2104/* Return a memory reference like MEMREF, but with its address changed to
2105 ADDR. The caller is asserting that the actual piece of memory pointed
2106 to is the same, just the form of the address is being changed, such as
2107 by putting something into a register. */
2108
2109rtx
35cb5232 2110replace_equiv_address (rtx memref, rtx addr)
537ffcfc 2111{
96216d37 2112 /* change_address_1 copies the memory attribute structure without change
2113 and that's exactly what we want here. */
ecfe4ca9 2114 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2115 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2116}
96216d37 2117
e4e86ec5 2118/* Likewise, but the reference is not required to be valid. */
2119
2120rtx
35cb5232 2121replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2122{
e4e86ec5 2123 return change_address_1 (memref, VOIDmode, addr, 0);
2124}
8259ab07 2125
2126/* Return a memory reference like MEMREF, but with its mode widened to
2127 MODE and offset by OFFSET. This would be used by targets that e.g.
2128 cannot issue QImode memory operations and have to use SImode memory
2129 operations plus masking logic. */
2130
2131rtx
35cb5232 2132widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2133{
9ce37fa7 2134 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2135 tree expr = MEM_EXPR (new_rtx);
2136 rtx memoffset = MEM_OFFSET (new_rtx);
8259ab07 2137 unsigned int size = GET_MODE_SIZE (mode);
2138
d28edf0d 2139 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2140 if (new_rtx == memref)
2141 return new_rtx;
d28edf0d 2142
8259ab07 2143 /* If we don't know what offset we were at within the expression, then
2144 we can't know if we've overstepped the bounds. */
22ee087b 2145 if (! memoffset)
8259ab07 2146 expr = NULL_TREE;
2147
2148 while (expr)
2149 {
2150 if (TREE_CODE (expr) == COMPONENT_REF)
2151 {
2152 tree field = TREE_OPERAND (expr, 1);
6374121b 2153 tree offset = component_ref_field_offset (expr);
8259ab07 2154
2155 if (! DECL_SIZE_UNIT (field))
2156 {
2157 expr = NULL_TREE;
2158 break;
2159 }
2160
2161 /* Is the field at least as large as the access? If so, ok,
2162 otherwise strip back to the containing structure. */
8359cfb4 2163 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2164 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
8259ab07 2165 && INTVAL (memoffset) >= 0)
2166 break;
2167
6374121b 2168 if (! host_integerp (offset, 1))
8259ab07 2169 {
2170 expr = NULL_TREE;
2171 break;
2172 }
2173
2174 expr = TREE_OPERAND (expr, 0);
6374121b 2175 memoffset
2176 = (GEN_INT (INTVAL (memoffset)
2177 + tree_low_cst (offset, 1)
2178 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2179 / BITS_PER_UNIT)));
8259ab07 2180 }
2181 /* Similarly for the decl. */
2182 else if (DECL_P (expr)
2183 && DECL_SIZE_UNIT (expr)
40c4e66e 2184 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
8259ab07 2185 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2186 && (! memoffset || INTVAL (memoffset) >= 0))
2187 break;
2188 else
2189 {
2190 /* The widened memory access overflows the expression, which means
2191 that it could alias another expression. Zap it. */
2192 expr = NULL_TREE;
2193 break;
2194 }
2195 }
2196
2197 if (! expr)
2198 memoffset = NULL_RTX;
2199
2200 /* The widened memory may alias other stuff, so zap the alias set. */
2201 /* ??? Maybe use get_alias_set on any remaining expression. */
2202
9ce37fa7 2203 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
bd1a81f7 2204 MEM_ALIGN (new_rtx),
2205 MEM_ADDR_SPACE (new_rtx), mode);
8259ab07 2206
9ce37fa7 2207 return new_rtx;
8259ab07 2208}
15bbde2b 2209\f
ac681e84 2210/* A fake decl that is used as the MEM_EXPR of spill slots. */
2211static GTY(()) tree spill_slot_decl;
2212
58029e61 2213tree
2214get_spill_slot_decl (bool force_build_p)
ac681e84 2215{
2216 tree d = spill_slot_decl;
2217 rtx rd;
2218
58029e61 2219 if (d || !force_build_p)
ac681e84 2220 return d;
2221
e60a6f7b 2222 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2223 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2224 DECL_ARTIFICIAL (d) = 1;
2225 DECL_IGNORED_P (d) = 1;
2226 TREE_USED (d) = 1;
2227 TREE_THIS_NOTRAP (d) = 1;
2228 spill_slot_decl = d;
2229
2230 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2231 MEM_NOTRAP_P (rd) = 1;
2232 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
bd1a81f7 2233 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
ac681e84 2234 SET_DECL_RTL (d, rd);
2235
2236 return d;
2237}
2238
2239/* Given MEM, a result from assign_stack_local, fill in the memory
2240 attributes as appropriate for a register allocator spill slot.
2241 These slots are not aliasable by other memory. We arrange for
2242 them all to use a single MEM_EXPR, so that the aliasing code can
2243 work properly in the case of shared spill slots. */
2244
2245void
2246set_mem_attrs_for_spill (rtx mem)
2247{
2248 alias_set_type alias;
2249 rtx addr, offset;
2250 tree expr;
2251
58029e61 2252 expr = get_spill_slot_decl (true);
ac681e84 2253 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2254
2255 /* We expect the incoming memory to be of the form:
2256 (mem:MODE (plus (reg sfp) (const_int offset)))
2257 with perhaps the plus missing for offset = 0. */
2258 addr = XEXP (mem, 0);
2259 offset = const0_rtx;
2260 if (GET_CODE (addr) == PLUS
971ba038 2261 && CONST_INT_P (XEXP (addr, 1)))
ac681e84 2262 offset = XEXP (addr, 1);
2263
2264 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2265 MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 2266 ADDR_SPACE_GENERIC, GET_MODE (mem));
ac681e84 2267 MEM_NOTRAP_P (mem) = 1;
2268}
2269\f
15bbde2b 2270/* Return a newly created CODE_LABEL rtx with a unique label number. */
2271
2272rtx
35cb5232 2273gen_label_rtx (void)
15bbde2b 2274{
a7ae1e59 2275 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2276 NULL, label_num++, NULL);
15bbde2b 2277}
2278\f
2279/* For procedure integration. */
2280
15bbde2b 2281/* Install new pointers to the first and last insns in the chain.
d4c332ff 2282 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2283 Used for an inline-procedure after copying the insn chain. */
2284
2285void
35cb5232 2286set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2287{
d4c332ff 2288 rtx insn;
2289
15bbde2b 2290 first_insn = first;
2291 last_insn = last;
d4c332ff 2292 cur_insn_uid = 0;
2293
9845d120 2294 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2295 {
2296 int debug_count = 0;
2297
2298 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2299 cur_debug_insn_uid = 0;
2300
2301 for (insn = first; insn; insn = NEXT_INSN (insn))
2302 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2303 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2304 else
2305 {
2306 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2307 if (DEBUG_INSN_P (insn))
2308 debug_count++;
2309 }
2310
2311 if (debug_count)
2312 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2313 else
2314 cur_debug_insn_uid++;
2315 }
2316 else
2317 for (insn = first; insn; insn = NEXT_INSN (insn))
2318 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2319
2320 cur_insn_uid++;
15bbde2b 2321}
15bbde2b 2322\f
d823ba47 2323/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2324 structure. This routine should only be called once. */
15bbde2b 2325
a40c0eeb 2326static void
df329266 2327unshare_all_rtl_1 (rtx insn)
15bbde2b 2328{
2d96a59a 2329 /* Unshare just about everything else. */
1cd4cfea 2330 unshare_all_rtl_in_chain (insn);
d823ba47 2331
15bbde2b 2332 /* Make sure the addresses of stack slots found outside the insn chain
2333 (such as, in DECL_RTL of a variable) are not shared
2334 with the insn chain.
2335
2336 This special care is necessary when the stack slot MEM does not
2337 actually appear in the insn chain. If it does appear, its address
2338 is unshared from all else at that point. */
45733446 2339 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2340}
2341
d823ba47 2342/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2343 structure, again. This is a fairly expensive thing to do so it
2344 should be done sparingly. */
2345
2346void
35cb5232 2347unshare_all_rtl_again (rtx insn)
2d96a59a 2348{
2349 rtx p;
5244079b 2350 tree decl;
2351
2d96a59a 2352 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2353 if (INSN_P (p))
2d96a59a 2354 {
2355 reset_used_flags (PATTERN (p));
2356 reset_used_flags (REG_NOTES (p));
2d96a59a 2357 }
5244079b 2358
01dc9f0c 2359 /* Make sure that virtual stack slots are not shared. */
265be050 2360 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2361
5244079b 2362 /* Make sure that virtual parameters are not shared. */
2363 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
265be050 2364 set_used_flags (DECL_RTL (decl));
5244079b 2365
2366 reset_used_flags (stack_slot_list);
2367
df329266 2368 unshare_all_rtl_1 (insn);
a40c0eeb 2369}
2370
2a1990e9 2371unsigned int
a40c0eeb 2372unshare_all_rtl (void)
2373{
df329266 2374 unshare_all_rtl_1 (get_insns ());
2a1990e9 2375 return 0;
2d96a59a 2376}
2377
20099e35 2378struct rtl_opt_pass pass_unshare_all_rtl =
77fce4cd 2379{
20099e35 2380 {
2381 RTL_PASS,
228967a9 2382 "unshare", /* name */
77fce4cd 2383 NULL, /* gate */
2384 unshare_all_rtl, /* execute */
2385 NULL, /* sub */
2386 NULL, /* next */
2387 0, /* static_pass_number */
0b1615c1 2388 TV_NONE, /* tv_id */
77fce4cd 2389 0, /* properties_required */
2390 0, /* properties_provided */
2391 0, /* properties_destroyed */
2392 0, /* todo_flags_start */
20099e35 2393 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2394 }
77fce4cd 2395};
2396
2397
1cd4cfea 2398/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2399 Recursively does the same for subexpressions. */
2400
2401static void
2402verify_rtx_sharing (rtx orig, rtx insn)
2403{
2404 rtx x = orig;
2405 int i;
2406 enum rtx_code code;
2407 const char *format_ptr;
2408
2409 if (x == 0)
2410 return;
2411
2412 code = GET_CODE (x);
2413
2414 /* These types may be freely shared. */
2415
2416 switch (code)
2417 {
2418 case REG:
688ff29b 2419 case DEBUG_EXPR:
2420 case VALUE:
1cd4cfea 2421 case CONST_INT:
2422 case CONST_DOUBLE:
e397ad8e 2423 case CONST_FIXED:
1cd4cfea 2424 case CONST_VECTOR:
2425 case SYMBOL_REF:
2426 case LABEL_REF:
2427 case CODE_LABEL:
2428 case PC:
2429 case CC0:
2430 case SCRATCH:
1cd4cfea 2431 return;
c09425a0 2432 /* SCRATCH must be shared because they represent distinct values. */
2433 case CLOBBER:
2434 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2435 return;
2436 break;
1cd4cfea 2437
2438 case CONST:
3072d30e 2439 if (shared_const_p (orig))
1cd4cfea 2440 return;
2441 break;
2442
2443 case MEM:
2444 /* A MEM is allowed to be shared if its address is constant. */
2445 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2446 || reload_completed || reload_in_progress)
2447 return;
2448
2449 break;
2450
2451 default:
2452 break;
2453 }
2454
2455 /* This rtx may not be shared. If it has already been seen,
2456 replace it with a copy of itself. */
9cee7c3f 2457#ifdef ENABLE_CHECKING
1cd4cfea 2458 if (RTX_FLAG (x, used))
2459 {
0a81f5a0 2460 error ("invalid rtl sharing found in the insn");
1cd4cfea 2461 debug_rtx (insn);
0a81f5a0 2462 error ("shared rtx");
1cd4cfea 2463 debug_rtx (x);
0a81f5a0 2464 internal_error ("internal consistency failure");
1cd4cfea 2465 }
9cee7c3f 2466#endif
2467 gcc_assert (!RTX_FLAG (x, used));
2468
1cd4cfea 2469 RTX_FLAG (x, used) = 1;
2470
8b332087 2471 /* Now scan the subexpressions recursively. */
1cd4cfea 2472
2473 format_ptr = GET_RTX_FORMAT (code);
2474
2475 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2476 {
2477 switch (*format_ptr++)
2478 {
2479 case 'e':
2480 verify_rtx_sharing (XEXP (x, i), insn);
2481 break;
2482
2483 case 'E':
2484 if (XVEC (x, i) != NULL)
2485 {
2486 int j;
2487 int len = XVECLEN (x, i);
2488
2489 for (j = 0; j < len; j++)
2490 {
9cee7c3f 2491 /* We allow sharing of ASM_OPERANDS inside single
2492 instruction. */
1cd4cfea 2493 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2494 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2495 == ASM_OPERANDS))
1cd4cfea 2496 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2497 else
2498 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2499 }
2500 }
2501 break;
2502 }
2503 }
2504 return;
2505}
2506
c7bf1374 2507/* Go through all the RTL insn bodies and check that there is no unexpected
1cd4cfea 2508 sharing in between the subexpressions. */
2509
2510void
2511verify_rtl_sharing (void)
2512{
2513 rtx p;
2514
2515 for (p = get_insns (); p; p = NEXT_INSN (p))
2516 if (INSN_P (p))
2517 {
2518 reset_used_flags (PATTERN (p));
2519 reset_used_flags (REG_NOTES (p));
764f640f 2520 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2521 {
2522 int i;
2523 rtx q, sequence = PATTERN (p);
2524
2525 for (i = 0; i < XVECLEN (sequence, 0); i++)
2526 {
2527 q = XVECEXP (sequence, 0, i);
2528 gcc_assert (INSN_P (q));
2529 reset_used_flags (PATTERN (q));
2530 reset_used_flags (REG_NOTES (q));
764f640f 2531 }
2532 }
1cd4cfea 2533 }
2534
2535 for (p = get_insns (); p; p = NEXT_INSN (p))
2536 if (INSN_P (p))
2537 {
2538 verify_rtx_sharing (PATTERN (p), p);
2539 verify_rtx_sharing (REG_NOTES (p), p);
1cd4cfea 2540 }
2541}
2542
2d96a59a 2543/* Go through all the RTL insn bodies and copy any invalid shared structure.
2544 Assumes the mark bits are cleared at entry. */
2545
1cd4cfea 2546void
2547unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2548{
2549 for (; insn; insn = NEXT_INSN (insn))
9204e736 2550 if (INSN_P (insn))
2d96a59a 2551 {
2552 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2553 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2d96a59a 2554 }
2555}
2556
01dc9f0c 2557/* Go through all virtual stack slots of a function and mark them as
265be050 2558 shared. We never replace the DECL_RTLs themselves with a copy,
2559 but expressions mentioned into a DECL_RTL cannot be shared with
2560 expressions in the instruction stream.
2561
2562 Note that reload may convert pseudo registers into memories in-place.
2563 Pseudo registers are always shared, but MEMs never are. Thus if we
2564 reset the used flags on MEMs in the instruction stream, we must set
2565 them again on MEMs that appear in DECL_RTLs. */
2566
01dc9f0c 2567static void
265be050 2568set_used_decls (tree blk)
01dc9f0c 2569{
2570 tree t;
2571
2572 /* Mark decls. */
2573 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 2574 if (DECL_RTL_SET_P (t))
265be050 2575 set_used_flags (DECL_RTL (t));
01dc9f0c 2576
2577 /* Now process sub-blocks. */
93110716 2578 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2579 set_used_decls (t);
01dc9f0c 2580}
2581
15bbde2b 2582/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2583 Recursively does the same for subexpressions. Uses
2584 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2585
2586rtx
35cb5232 2587copy_rtx_if_shared (rtx orig)
15bbde2b 2588{
0e0727c4 2589 copy_rtx_if_shared_1 (&orig);
2590 return orig;
2591}
2592
7ba6ce7a 2593/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2594 use. Recursively does the same for subexpressions. */
2595
0e0727c4 2596static void
2597copy_rtx_if_shared_1 (rtx *orig1)
2598{
2599 rtx x;
19cb6b50 2600 int i;
2601 enum rtx_code code;
0e0727c4 2602 rtx *last_ptr;
19cb6b50 2603 const char *format_ptr;
15bbde2b 2604 int copied = 0;
0e0727c4 2605 int length;
2606
2607 /* Repeat is used to turn tail-recursion into iteration. */
2608repeat:
2609 x = *orig1;
15bbde2b 2610
2611 if (x == 0)
0e0727c4 2612 return;
15bbde2b 2613
2614 code = GET_CODE (x);
2615
2616 /* These types may be freely shared. */
2617
2618 switch (code)
2619 {
2620 case REG:
688ff29b 2621 case DEBUG_EXPR:
2622 case VALUE:
15bbde2b 2623 case CONST_INT:
2624 case CONST_DOUBLE:
e397ad8e 2625 case CONST_FIXED:
886cfd4f 2626 case CONST_VECTOR:
15bbde2b 2627 case SYMBOL_REF:
1cd4cfea 2628 case LABEL_REF:
15bbde2b 2629 case CODE_LABEL:
2630 case PC:
2631 case CC0:
2632 case SCRATCH:
a92771b8 2633 /* SCRATCH must be shared because they represent distinct values. */
0e0727c4 2634 return;
c09425a0 2635 case CLOBBER:
2636 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2637 return;
2638 break;
15bbde2b 2639
f63d12e3 2640 case CONST:
3072d30e 2641 if (shared_const_p (x))
0e0727c4 2642 return;
f63d12e3 2643 break;
2644
9845d120 2645 case DEBUG_INSN:
15bbde2b 2646 case INSN:
2647 case JUMP_INSN:
2648 case CALL_INSN:
2649 case NOTE:
15bbde2b 2650 case BARRIER:
2651 /* The chain of insns is not being copied. */
0e0727c4 2652 return;
15bbde2b 2653
0dbd1c74 2654 default:
2655 break;
15bbde2b 2656 }
2657
2658 /* This rtx may not be shared. If it has already been seen,
2659 replace it with a copy of itself. */
2660
7c25cb91 2661 if (RTX_FLAG (x, used))
15bbde2b 2662 {
f2d0e9f1 2663 x = shallow_copy_rtx (x);
15bbde2b 2664 copied = 1;
2665 }
7c25cb91 2666 RTX_FLAG (x, used) = 1;
15bbde2b 2667
2668 /* Now scan the subexpressions recursively.
2669 We can store any replaced subexpressions directly into X
2670 since we know X is not shared! Any vectors in X
2671 must be copied if X was copied. */
2672
2673 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2674 length = GET_RTX_LENGTH (code);
2675 last_ptr = NULL;
2676
2677 for (i = 0; i < length; i++)
15bbde2b 2678 {
2679 switch (*format_ptr++)
2680 {
2681 case 'e':
0e0727c4 2682 if (last_ptr)
2683 copy_rtx_if_shared_1 (last_ptr);
2684 last_ptr = &XEXP (x, i);
15bbde2b 2685 break;
2686
2687 case 'E':
2688 if (XVEC (x, i) != NULL)
2689 {
19cb6b50 2690 int j;
ffe0869b 2691 int len = XVECLEN (x, i);
0e0727c4 2692
8b332087 2693 /* Copy the vector iff I copied the rtx and the length
2694 is nonzero. */
ffe0869b 2695 if (copied && len > 0)
a4070a91 2696 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
0e0727c4 2697
d632b59a 2698 /* Call recursively on all inside the vector. */
ffe0869b 2699 for (j = 0; j < len; j++)
0e0727c4 2700 {
2701 if (last_ptr)
2702 copy_rtx_if_shared_1 (last_ptr);
2703 last_ptr = &XVECEXP (x, i, j);
2704 }
15bbde2b 2705 }
2706 break;
2707 }
2708 }
0e0727c4 2709 *orig1 = x;
2710 if (last_ptr)
2711 {
2712 orig1 = last_ptr;
2713 goto repeat;
2714 }
2715 return;
15bbde2b 2716}
2717
2718/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2719 to look for shared sub-parts. */
2720
2721void
35cb5232 2722reset_used_flags (rtx x)
15bbde2b 2723{
19cb6b50 2724 int i, j;
2725 enum rtx_code code;
2726 const char *format_ptr;
0e0727c4 2727 int length;
15bbde2b 2728
0e0727c4 2729 /* Repeat is used to turn tail-recursion into iteration. */
2730repeat:
15bbde2b 2731 if (x == 0)
2732 return;
2733
2734 code = GET_CODE (x);
2735
c3418f42 2736 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2737 for them. */
2738
2739 switch (code)
2740 {
2741 case REG:
688ff29b 2742 case DEBUG_EXPR:
2743 case VALUE:
15bbde2b 2744 case CONST_INT:
2745 case CONST_DOUBLE:
e397ad8e 2746 case CONST_FIXED:
886cfd4f 2747 case CONST_VECTOR:
15bbde2b 2748 case SYMBOL_REF:
2749 case CODE_LABEL:
2750 case PC:
2751 case CC0:
2752 return;
2753
9845d120 2754 case DEBUG_INSN:
15bbde2b 2755 case INSN:
2756 case JUMP_INSN:
2757 case CALL_INSN:
2758 case NOTE:
2759 case LABEL_REF:
2760 case BARRIER:
2761 /* The chain of insns is not being copied. */
2762 return;
d823ba47 2763
0dbd1c74 2764 default:
2765 break;
15bbde2b 2766 }
2767
7c25cb91 2768 RTX_FLAG (x, used) = 0;
15bbde2b 2769
2770 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2771 length = GET_RTX_LENGTH (code);
2772
2773 for (i = 0; i < length; i++)
15bbde2b 2774 {
2775 switch (*format_ptr++)
2776 {
2777 case 'e':
0e0727c4 2778 if (i == length-1)
2779 {
2780 x = XEXP (x, i);
2781 goto repeat;
2782 }
15bbde2b 2783 reset_used_flags (XEXP (x, i));
2784 break;
2785
2786 case 'E':
2787 for (j = 0; j < XVECLEN (x, i); j++)
2788 reset_used_flags (XVECEXP (x, i, j));
2789 break;
2790 }
2791 }
2792}
1cd4cfea 2793
2794/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2795 to look for shared sub-parts. */
2796
2797void
2798set_used_flags (rtx x)
2799{
2800 int i, j;
2801 enum rtx_code code;
2802 const char *format_ptr;
2803
2804 if (x == 0)
2805 return;
2806
2807 code = GET_CODE (x);
2808
2809 /* These types may be freely shared so we needn't do any resetting
2810 for them. */
2811
2812 switch (code)
2813 {
2814 case REG:
688ff29b 2815 case DEBUG_EXPR:
2816 case VALUE:
1cd4cfea 2817 case CONST_INT:
2818 case CONST_DOUBLE:
e397ad8e 2819 case CONST_FIXED:
1cd4cfea 2820 case CONST_VECTOR:
2821 case SYMBOL_REF:
2822 case CODE_LABEL:
2823 case PC:
2824 case CC0:
2825 return;
2826
9845d120 2827 case DEBUG_INSN:
1cd4cfea 2828 case INSN:
2829 case JUMP_INSN:
2830 case CALL_INSN:
2831 case NOTE:
2832 case LABEL_REF:
2833 case BARRIER:
2834 /* The chain of insns is not being copied. */
2835 return;
2836
2837 default:
2838 break;
2839 }
2840
2841 RTX_FLAG (x, used) = 1;
2842
2843 format_ptr = GET_RTX_FORMAT (code);
2844 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2845 {
2846 switch (*format_ptr++)
2847 {
2848 case 'e':
2849 set_used_flags (XEXP (x, i));
2850 break;
2851
2852 case 'E':
2853 for (j = 0; j < XVECLEN (x, i); j++)
2854 set_used_flags (XVECEXP (x, i, j));
2855 break;
2856 }
2857 }
2858}
15bbde2b 2859\f
2860/* Copy X if necessary so that it won't be altered by changes in OTHER.
2861 Return X or the rtx for the pseudo reg the value of X was copied into.
2862 OTHER must be valid as a SET_DEST. */
2863
2864rtx
35cb5232 2865make_safe_from (rtx x, rtx other)
15bbde2b 2866{
2867 while (1)
2868 switch (GET_CODE (other))
2869 {
2870 case SUBREG:
2871 other = SUBREG_REG (other);
2872 break;
2873 case STRICT_LOW_PART:
2874 case SIGN_EXTEND:
2875 case ZERO_EXTEND:
2876 other = XEXP (other, 0);
2877 break;
2878 default:
2879 goto done;
2880 }
2881 done:
e16ceb8e 2882 if ((MEM_P (other)
15bbde2b 2883 && ! CONSTANT_P (x)
8ad4c111 2884 && !REG_P (x)
15bbde2b 2885 && GET_CODE (x) != SUBREG)
8ad4c111 2886 || (REG_P (other)
15bbde2b 2887 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2888 || reg_mentioned_p (other, x))))
2889 {
2890 rtx temp = gen_reg_rtx (GET_MODE (x));
2891 emit_move_insn (temp, x);
2892 return temp;
2893 }
2894 return x;
2895}
2896\f
2897/* Emission of insns (adding them to the doubly-linked list). */
2898
2899/* Return the first insn of the current sequence or current function. */
2900
2901rtx
35cb5232 2902get_insns (void)
15bbde2b 2903{
2904 return first_insn;
2905}
2906
fb20d6fa 2907/* Specify a new insn as the first in the chain. */
2908
2909void
35cb5232 2910set_first_insn (rtx insn)
fb20d6fa 2911{
611234b4 2912 gcc_assert (!PREV_INSN (insn));
fb20d6fa 2913 first_insn = insn;
2914}
2915
15bbde2b 2916/* Return the last insn emitted in current sequence or current function. */
2917
2918rtx
35cb5232 2919get_last_insn (void)
15bbde2b 2920{
2921 return last_insn;
2922}
2923
2924/* Specify a new insn as the last in the chain. */
2925
2926void
35cb5232 2927set_last_insn (rtx insn)
15bbde2b 2928{
611234b4 2929 gcc_assert (!NEXT_INSN (insn));
15bbde2b 2930 last_insn = insn;
2931}
2932
2933/* Return the last insn emitted, even if it is in a sequence now pushed. */
2934
2935rtx
35cb5232 2936get_last_insn_anywhere (void)
15bbde2b 2937{
2938 struct sequence_stack *stack;
2939 if (last_insn)
2940 return last_insn;
0a893c29 2941 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2942 if (stack->last != 0)
2943 return stack->last;
2944 return 0;
2945}
2946
70545de4 2947/* Return the first nonnote insn emitted in current sequence or current
2948 function. This routine looks inside SEQUENCEs. */
2949
2950rtx
35cb5232 2951get_first_nonnote_insn (void)
70545de4 2952{
f86e856e 2953 rtx insn = first_insn;
2954
2955 if (insn)
2956 {
2957 if (NOTE_P (insn))
2958 for (insn = next_insn (insn);
2959 insn && NOTE_P (insn);
2960 insn = next_insn (insn))
2961 continue;
2962 else
2963 {
1c14a50e 2964 if (NONJUMP_INSN_P (insn)
f86e856e 2965 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2966 insn = XVECEXP (PATTERN (insn), 0, 0);
2967 }
2968 }
70545de4 2969
2970 return insn;
2971}
2972
2973/* Return the last nonnote insn emitted in current sequence or current
2974 function. This routine looks inside SEQUENCEs. */
2975
2976rtx
35cb5232 2977get_last_nonnote_insn (void)
70545de4 2978{
f86e856e 2979 rtx insn = last_insn;
2980
2981 if (insn)
2982 {
2983 if (NOTE_P (insn))
2984 for (insn = previous_insn (insn);
2985 insn && NOTE_P (insn);
2986 insn = previous_insn (insn))
2987 continue;
2988 else
2989 {
1c14a50e 2990 if (NONJUMP_INSN_P (insn)
f86e856e 2991 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2992 insn = XVECEXP (PATTERN (insn), 0,
2993 XVECLEN (PATTERN (insn), 0) - 1);
2994 }
2995 }
70545de4 2996
2997 return insn;
2998}
2999
15bbde2b 3000/* Return a number larger than any instruction's uid in this function. */
3001
3002int
35cb5232 3003get_max_uid (void)
15bbde2b 3004{
3005 return cur_insn_uid;
3006}
9845d120 3007
3008/* Return the number of actual (non-debug) insns emitted in this
3009 function. */
3010
3011int
3012get_max_insn_count (void)
3013{
3014 int n = cur_insn_uid;
3015
3016 /* The table size must be stable across -g, to avoid codegen
3017 differences due to debug insns, and not be affected by
3018 -fmin-insn-uid, to avoid excessive table size and to simplify
3019 debugging of -fcompare-debug failures. */
3020 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3021 n -= cur_debug_insn_uid;
3022 else
3023 n -= MIN_NONDEBUG_INSN_UID;
3024
3025 return n;
3026}
3027
15bbde2b 3028\f
3029/* Return the next insn. If it is a SEQUENCE, return the first insn
3030 of the sequence. */
3031
3032rtx
35cb5232 3033next_insn (rtx insn)
15bbde2b 3034{
ce4469fa 3035 if (insn)
3036 {
3037 insn = NEXT_INSN (insn);
3038 if (insn && NONJUMP_INSN_P (insn)
3039 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3040 insn = XVECEXP (PATTERN (insn), 0, 0);
3041 }
15bbde2b 3042
ce4469fa 3043 return insn;
15bbde2b 3044}
3045
3046/* Return the previous insn. If it is a SEQUENCE, return the last insn
3047 of the sequence. */
3048
3049rtx
35cb5232 3050previous_insn (rtx insn)
15bbde2b 3051{
ce4469fa 3052 if (insn)
3053 {
3054 insn = PREV_INSN (insn);
3055 if (insn && NONJUMP_INSN_P (insn)
3056 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3057 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3058 }
15bbde2b 3059
ce4469fa 3060 return insn;
15bbde2b 3061}
3062
3063/* Return the next insn after INSN that is not a NOTE. This routine does not
3064 look inside SEQUENCEs. */
3065
3066rtx
35cb5232 3067next_nonnote_insn (rtx insn)
15bbde2b 3068{
ce4469fa 3069 while (insn)
3070 {
3071 insn = NEXT_INSN (insn);
3072 if (insn == 0 || !NOTE_P (insn))
3073 break;
3074 }
15bbde2b 3075
ce4469fa 3076 return insn;
15bbde2b 3077}
3078
c4d13c5c 3079/* Return the next insn after INSN that is not a NOTE, but stop the
3080 search before we enter another basic block. This routine does not
3081 look inside SEQUENCEs. */
3082
3083rtx
3084next_nonnote_insn_bb (rtx insn)
3085{
3086 while (insn)
3087 {
3088 insn = NEXT_INSN (insn);
3089 if (insn == 0 || !NOTE_P (insn))
3090 break;
3091 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3092 return NULL_RTX;
3093 }
3094
3095 return insn;
3096}
3097
15bbde2b 3098/* Return the previous insn before INSN that is not a NOTE. This routine does
3099 not look inside SEQUENCEs. */
3100
3101rtx
35cb5232 3102prev_nonnote_insn (rtx insn)
15bbde2b 3103{
ce4469fa 3104 while (insn)
3105 {
3106 insn = PREV_INSN (insn);
3107 if (insn == 0 || !NOTE_P (insn))
3108 break;
3109 }
15bbde2b 3110
ce4469fa 3111 return insn;
15bbde2b 3112}
3113
bcc66782 3114/* Return the previous insn before INSN that is not a NOTE, but stop
3115 the search before we enter another basic block. This routine does
3116 not look inside SEQUENCEs. */
3117
3118rtx
3119prev_nonnote_insn_bb (rtx insn)
3120{
3121 while (insn)
3122 {
3123 insn = PREV_INSN (insn);
3124 if (insn == 0 || !NOTE_P (insn))
3125 break;
3126 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3127 return NULL_RTX;
3128 }
3129
3130 return insn;
3131}
3132
9845d120 3133/* Return the next insn after INSN that is not a DEBUG_INSN. This
3134 routine does not look inside SEQUENCEs. */
3135
3136rtx
3137next_nondebug_insn (rtx insn)
3138{
3139 while (insn)
3140 {
3141 insn = NEXT_INSN (insn);
3142 if (insn == 0 || !DEBUG_INSN_P (insn))
3143 break;
3144 }
3145
3146 return insn;
3147}
3148
3149/* Return the previous insn before INSN that is not a DEBUG_INSN.
3150 This routine does not look inside SEQUENCEs. */
3151
3152rtx
3153prev_nondebug_insn (rtx insn)
3154{
3155 while (insn)
3156 {
3157 insn = PREV_INSN (insn);
3158 if (insn == 0 || !DEBUG_INSN_P (insn))
3159 break;
3160 }
3161
3162 return insn;
3163}
3164
15bbde2b 3165/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3166 or 0, if there is none. This routine does not look inside
a92771b8 3167 SEQUENCEs. */
15bbde2b 3168
3169rtx
35cb5232 3170next_real_insn (rtx insn)
15bbde2b 3171{
ce4469fa 3172 while (insn)
3173 {
3174 insn = NEXT_INSN (insn);
3175 if (insn == 0 || INSN_P (insn))
3176 break;
3177 }
15bbde2b 3178
ce4469fa 3179 return insn;
15bbde2b 3180}
3181
3182/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3183 or 0, if there is none. This routine does not look inside
3184 SEQUENCEs. */
3185
3186rtx
35cb5232 3187prev_real_insn (rtx insn)
15bbde2b 3188{
ce4469fa 3189 while (insn)
3190 {
3191 insn = PREV_INSN (insn);
3192 if (insn == 0 || INSN_P (insn))
3193 break;
3194 }
15bbde2b 3195
ce4469fa 3196 return insn;
15bbde2b 3197}
3198
d5f9786f 3199/* Return the last CALL_INSN in the current list, or 0 if there is none.
3200 This routine does not look inside SEQUENCEs. */
3201
3202rtx
35cb5232 3203last_call_insn (void)
d5f9786f 3204{
3205 rtx insn;
3206
3207 for (insn = get_last_insn ();
6d7dc5b9 3208 insn && !CALL_P (insn);
d5f9786f 3209 insn = PREV_INSN (insn))
3210 ;
3211
3212 return insn;
3213}
3214
15bbde2b 3215/* Find the next insn after INSN that really does something. This routine
3216 does not look inside SEQUENCEs. Until reload has completed, this is the
3217 same as next_real_insn. */
3218
2215ca0d 3219int
52d07779 3220active_insn_p (const_rtx insn)
2215ca0d 3221{
6d7dc5b9 3222 return (CALL_P (insn) || JUMP_P (insn)
3223 || (NONJUMP_INSN_P (insn)
3a66feab 3224 && (! reload_completed
3225 || (GET_CODE (PATTERN (insn)) != USE
3226 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3227}
3228
15bbde2b 3229rtx
35cb5232 3230next_active_insn (rtx insn)
15bbde2b 3231{
ce4469fa 3232 while (insn)
3233 {
3234 insn = NEXT_INSN (insn);
3235 if (insn == 0 || active_insn_p (insn))
3236 break;
3237 }
15bbde2b 3238
ce4469fa 3239 return insn;
15bbde2b 3240}
3241
3242/* Find the last insn before INSN that really does something. This routine
3243 does not look inside SEQUENCEs. Until reload has completed, this is the
3244 same as prev_real_insn. */
3245
3246rtx
35cb5232 3247prev_active_insn (rtx insn)
15bbde2b 3248{
ce4469fa 3249 while (insn)
3250 {
3251 insn = PREV_INSN (insn);
3252 if (insn == 0 || active_insn_p (insn))
3253 break;
3254 }
15bbde2b 3255
ce4469fa 3256 return insn;
15bbde2b 3257}
3258
3259/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3260
3261rtx
35cb5232 3262next_label (rtx insn)
15bbde2b 3263{
ce4469fa 3264 while (insn)
3265 {
3266 insn = NEXT_INSN (insn);
3267 if (insn == 0 || LABEL_P (insn))
3268 break;
3269 }
15bbde2b 3270
ce4469fa 3271 return insn;
15bbde2b 3272}
3273
3274/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3275
3276rtx
35cb5232 3277prev_label (rtx insn)
15bbde2b 3278{
ce4469fa 3279 while (insn)
3280 {
3281 insn = PREV_INSN (insn);
3282 if (insn == 0 || LABEL_P (insn))
3283 break;
3284 }
15bbde2b 3285
ce4469fa 3286 return insn;
15bbde2b 3287}
67c5e2a9 3288
3289/* Return the last label to mark the same position as LABEL. Return null
3290 if LABEL itself is null. */
3291
3292rtx
3293skip_consecutive_labels (rtx label)
3294{
3295 rtx insn;
3296
3297 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3298 if (LABEL_P (insn))
3299 label = insn;
3300
3301 return label;
3302}
15bbde2b 3303\f
3304#ifdef HAVE_cc0
b15e0bba 3305/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3306 and REG_CC_USER notes so we can find it. */
3307
3308void
35cb5232 3309link_cc0_insns (rtx insn)
b15e0bba 3310{
3311 rtx user = next_nonnote_insn (insn);
3312
6d7dc5b9 3313 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
b15e0bba 3314 user = XVECEXP (PATTERN (user), 0, 0);
3315
a1ddb869 3316 add_reg_note (user, REG_CC_SETTER, insn);
3317 add_reg_note (insn, REG_CC_USER, user);
b15e0bba 3318}
3319
15bbde2b 3320/* Return the next insn that uses CC0 after INSN, which is assumed to
3321 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3322 applied to the result of this function should yield INSN).
3323
3324 Normally, this is simply the next insn. However, if a REG_CC_USER note
3325 is present, it contains the insn that uses CC0.
3326
3327 Return 0 if we can't find the insn. */
3328
3329rtx
35cb5232 3330next_cc0_user (rtx insn)
15bbde2b 3331{
b572011e 3332 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3333
3334 if (note)
3335 return XEXP (note, 0);
3336
3337 insn = next_nonnote_insn (insn);
6d7dc5b9 3338 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3339 insn = XVECEXP (PATTERN (insn), 0, 0);
3340
9204e736 3341 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3342 return insn;
3343
3344 return 0;
3345}
3346
3347/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3348 note, it is the previous insn. */
3349
3350rtx
35cb5232 3351prev_cc0_setter (rtx insn)
15bbde2b 3352{
b572011e 3353 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3354
3355 if (note)
3356 return XEXP (note, 0);
3357
3358 insn = prev_nonnote_insn (insn);
611234b4 3359 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3360
3361 return insn;
3362}
3363#endif
344dc2fa 3364
698ff1f0 3365#ifdef AUTO_INC_DEC
3366/* Find a RTX_AUTOINC class rtx which matches DATA. */
3367
3368static int
3369find_auto_inc (rtx *xp, void *data)
3370{
3371 rtx x = *xp;
225ab426 3372 rtx reg = (rtx) data;
698ff1f0 3373
3374 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3375 return 0;
3376
3377 switch (GET_CODE (x))
3378 {
3379 case PRE_DEC:
3380 case PRE_INC:
3381 case POST_DEC:
3382 case POST_INC:
3383 case PRE_MODIFY:
3384 case POST_MODIFY:
3385 if (rtx_equal_p (reg, XEXP (x, 0)))
3386 return 1;
3387 break;
3388
3389 default:
3390 gcc_unreachable ();
3391 }
3392 return -1;
3393}
3394#endif
3395
344dc2fa 3396/* Increment the label uses for all labels present in rtx. */
3397
3398static void
35cb5232 3399mark_label_nuses (rtx x)
344dc2fa 3400{
19cb6b50 3401 enum rtx_code code;
3402 int i, j;
3403 const char *fmt;
344dc2fa 3404
3405 code = GET_CODE (x);
a030d4a8 3406 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3407 LABEL_NUSES (XEXP (x, 0))++;
3408
3409 fmt = GET_RTX_FORMAT (code);
3410 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3411 {
3412 if (fmt[i] == 'e')
ff385626 3413 mark_label_nuses (XEXP (x, i));
344dc2fa 3414 else if (fmt[i] == 'E')
ff385626 3415 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3416 mark_label_nuses (XVECEXP (x, i, j));
3417 }
3418}
3419
15bbde2b 3420\f
3421/* Try splitting insns that can be split for better scheduling.
3422 PAT is the pattern which might split.
3423 TRIAL is the insn providing PAT.
6ef828f9 3424 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3425
3426 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3427 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3428 returns TRIAL. If the insn to be returned can be split, it will be. */
3429
3430rtx
35cb5232 3431try_split (rtx pat, rtx trial, int last)
15bbde2b 3432{
3433 rtx before = PREV_INSN (trial);
3434 rtx after = NEXT_INSN (trial);
15bbde2b 3435 int has_barrier = 0;
1e5b92fa 3436 rtx note, seq, tem;
3cd757b1 3437 int probability;
e13693ec 3438 rtx insn_last, insn;
3439 int njumps = 0;
3cd757b1 3440
25e880b1 3441 /* We're not good at redistributing frame information. */
3442 if (RTX_FRAME_RELATED_P (trial))
3443 return trial;
3444
3cd757b1 3445 if (any_condjump_p (trial)
3446 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3447 split_branch_probability = INTVAL (XEXP (note, 0));
3448 probability = split_branch_probability;
3449
3450 seq = split_insns (pat, trial);
3451
3452 split_branch_probability = -1;
15bbde2b 3453
3454 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3455 We may need to handle this specially. */
6d7dc5b9 3456 if (after && BARRIER_P (after))
15bbde2b 3457 {
3458 has_barrier = 1;
3459 after = NEXT_INSN (after);
3460 }
3461
e13693ec 3462 if (!seq)
3463 return trial;
3464
3465 /* Avoid infinite loop if any insn of the result matches
3466 the original pattern. */
3467 insn_last = seq;
3468 while (1)
15bbde2b 3469 {
e13693ec 3470 if (INSN_P (insn_last)
3471 && rtx_equal_p (PATTERN (insn_last), pat))
3472 return trial;
3473 if (!NEXT_INSN (insn_last))
3474 break;
3475 insn_last = NEXT_INSN (insn_last);
3476 }
d823ba47 3477
3072d30e 3478 /* We will be adding the new sequence to the function. The splitters
3479 may have introduced invalid RTL sharing, so unshare the sequence now. */
3480 unshare_all_rtl_in_chain (seq);
3481
e13693ec 3482 /* Mark labels. */
3483 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3484 {
6d7dc5b9 3485 if (JUMP_P (insn))
e13693ec 3486 {
3487 mark_jump_label (PATTERN (insn), insn, 0);
3488 njumps++;
3489 if (probability != -1
3490 && any_condjump_p (insn)
3491 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3492 {
e13693ec 3493 /* We can preserve the REG_BR_PROB notes only if exactly
3494 one jump is created, otherwise the machine description
3495 is responsible for this step using
3496 split_branch_probability variable. */
611234b4 3497 gcc_assert (njumps == 1);
a1ddb869 3498 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
31d3e01c 3499 }
e13693ec 3500 }
3501 }
3502
3503 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3504 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
6d7dc5b9 3505 if (CALL_P (trial))
e13693ec 3506 {
3507 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3508 if (CALL_P (insn))
e13693ec 3509 {
0bb5a6cd 3510 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3511 while (*p)
3512 p = &XEXP (*p, 1);
3513 *p = CALL_INSN_FUNCTION_USAGE (trial);
e13693ec 3514 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b922281a 3515
3516 /* Update the debug information for the CALL_INSN. */
3517 if (flag_enable_icf_debug)
3518 (*debug_hooks->copy_call_info) (trial, insn);
e13693ec 3519 }
3520 }
5262c253 3521
e13693ec 3522 /* Copy notes, particularly those related to the CFG. */
3523 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3524 {
3525 switch (REG_NOTE_KIND (note))
3526 {
3527 case REG_EH_REGION:
e38def9c 3528 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3529 break;
381eb1e7 3530
e13693ec 3531 case REG_NORETURN:
3532 case REG_SETJMP:
698ff1f0 3533 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3534 {
6d7dc5b9 3535 if (CALL_P (insn))
a1ddb869 3536 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3537 }
e13693ec 3538 break;
5bb27a4b 3539
e13693ec 3540 case REG_NON_LOCAL_GOTO:
698ff1f0 3541 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3542 {
6d7dc5b9 3543 if (JUMP_P (insn))
a1ddb869 3544 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3545 }
e13693ec 3546 break;
344dc2fa 3547
698ff1f0 3548#ifdef AUTO_INC_DEC
3549 case REG_INC:
3550 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3551 {
3552 rtx reg = XEXP (note, 0);
3553 if (!FIND_REG_INC_NOTE (insn, reg)
3554 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3555 add_reg_note (insn, REG_INC, reg);
698ff1f0 3556 }
3557 break;
3558#endif
3559
e13693ec 3560 default:
3561 break;
15bbde2b 3562 }
e13693ec 3563 }
3564
3565 /* If there are LABELS inside the split insns increment the
3566 usage count so we don't delete the label. */
19d2fe05 3567 if (INSN_P (trial))
e13693ec 3568 {
3569 insn = insn_last;
3570 while (insn != NULL_RTX)
15bbde2b 3571 {
19d2fe05 3572 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3573 if (NONJUMP_INSN_P (insn))
e13693ec 3574 mark_label_nuses (PATTERN (insn));
15bbde2b 3575
e13693ec 3576 insn = PREV_INSN (insn);
3577 }
15bbde2b 3578 }
3579
13751393 3580 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
e13693ec 3581
3582 delete_insn (trial);
3583 if (has_barrier)
3584 emit_barrier_after (tem);
3585
3586 /* Recursively call try_split for each new insn created; by the
3587 time control returns here that insn will be fully split, so
3588 set LAST and continue from the insn after the one returned.
3589 We can't use next_active_insn here since AFTER may be a note.
3590 Ignore deleted insns, which can be occur if not optimizing. */
3591 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3592 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3593 tem = try_split (PATTERN (tem), tem, 1);
3594
3595 /* Return either the first or the last insn, depending on which was
3596 requested. */
3597 return last
3598 ? (after ? PREV_INSN (after) : last_insn)
3599 : NEXT_INSN (before);
15bbde2b 3600}
3601\f
3602/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3603 Store PATTERN in the pattern slots. */
15bbde2b 3604
3605rtx
35cb5232 3606make_insn_raw (rtx pattern)
15bbde2b 3607{
19cb6b50 3608 rtx insn;
15bbde2b 3609
d7c47c0e 3610 insn = rtx_alloc (INSN);
15bbde2b 3611
575333f9 3612 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3613 PATTERN (insn) = pattern;
3614 INSN_CODE (insn) = -1;
fc92fa61 3615 REG_NOTES (insn) = NULL;
375c1c8a 3616 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3617 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3618
fe7f701d 3619#ifdef ENABLE_RTL_CHECKING
3620 if (insn
9204e736 3621 && INSN_P (insn)
fe7f701d 3622 && (returnjump_p (insn)
3623 || (GET_CODE (insn) == SET
3624 && SET_DEST (insn) == pc_rtx)))
3625 {
c3ceba8e 3626 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3627 debug_rtx (insn);
3628 }
3629#endif
d823ba47 3630
15bbde2b 3631 return insn;
3632}
3633
9845d120 3634/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3635
3636rtx
3637make_debug_insn_raw (rtx pattern)
3638{
3639 rtx insn;
3640
3641 insn = rtx_alloc (DEBUG_INSN);
3642 INSN_UID (insn) = cur_debug_insn_uid++;
3643 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3644 INSN_UID (insn) = cur_insn_uid++;
3645
3646 PATTERN (insn) = pattern;
3647 INSN_CODE (insn) = -1;
3648 REG_NOTES (insn) = NULL;
3649 INSN_LOCATOR (insn) = curr_insn_locator ();
3650 BLOCK_FOR_INSN (insn) = NULL;
3651
3652 return insn;
3653}
3654
31d3e01c 3655/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3656
89140b26 3657rtx
35cb5232 3658make_jump_insn_raw (rtx pattern)
15bbde2b 3659{
19cb6b50 3660 rtx insn;
15bbde2b 3661
6a84e367 3662 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3663 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3664
3665 PATTERN (insn) = pattern;
3666 INSN_CODE (insn) = -1;
fc92fa61 3667 REG_NOTES (insn) = NULL;
3668 JUMP_LABEL (insn) = NULL;
375c1c8a 3669 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3670 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3671
3672 return insn;
3673}
6e911104 3674
31d3e01c 3675/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3676
3677static rtx
35cb5232 3678make_call_insn_raw (rtx pattern)
6e911104 3679{
19cb6b50 3680 rtx insn;
6e911104 3681
3682 insn = rtx_alloc (CALL_INSN);
3683 INSN_UID (insn) = cur_insn_uid++;
3684
3685 PATTERN (insn) = pattern;
3686 INSN_CODE (insn) = -1;
6e911104 3687 REG_NOTES (insn) = NULL;
3688 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
375c1c8a 3689 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3690 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3691
3692 return insn;
3693}
15bbde2b 3694\f
3695/* Add INSN to the end of the doubly-linked list.
3696 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3697
3698void
35cb5232 3699add_insn (rtx insn)
15bbde2b 3700{
3701 PREV_INSN (insn) = last_insn;
3702 NEXT_INSN (insn) = 0;
3703
3704 if (NULL != last_insn)
3705 NEXT_INSN (last_insn) = insn;
3706
3707 if (NULL == first_insn)
3708 first_insn = insn;
3709
3710 last_insn = insn;
3711}
3712
312de84d 3713/* Add INSN into the doubly-linked list after insn AFTER. This and
3714 the next should be the only functions called to insert an insn once
f65c10c0 3715 delay slots have been filled since only they know how to update a
312de84d 3716 SEQUENCE. */
15bbde2b 3717
3718void
3072d30e 3719add_insn_after (rtx insn, rtx after, basic_block bb)
15bbde2b 3720{
3721 rtx next = NEXT_INSN (after);
3722
611234b4 3723 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3724
15bbde2b 3725 NEXT_INSN (insn) = next;
3726 PREV_INSN (insn) = after;
3727
3728 if (next)
3729 {
3730 PREV_INSN (next) = insn;
6d7dc5b9 3731 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
15bbde2b 3732 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3733 }
3734 else if (last_insn == after)
3735 last_insn = insn;
3736 else
3737 {
0a893c29 3738 struct sequence_stack *stack = seq_stack;
15bbde2b 3739 /* Scan all pending sequences too. */
3740 for (; stack; stack = stack->next)
3741 if (after == stack->last)
398f4855 3742 {
3743 stack->last = insn;
3744 break;
3745 }
312de84d 3746
611234b4 3747 gcc_assert (stack);
15bbde2b 3748 }
3749
6d7dc5b9 3750 if (!BARRIER_P (after)
3751 && !BARRIER_P (insn)
9dda7915 3752 && (bb = BLOCK_FOR_INSN (after)))
3753 {
3754 set_block_for_insn (insn, bb);
308f9b79 3755 if (INSN_P (insn))
3072d30e 3756 df_insn_rescan (insn);
9dda7915 3757 /* Should not happen as first in the BB is always
3fb1e43b 3758 either NOTE or LABEL. */
5496dbfc 3759 if (BB_END (bb) == after
9dda7915 3760 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3761 && !BARRIER_P (insn)
ad4583d9 3762 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3763 BB_END (bb) = insn;
9dda7915 3764 }
3765
15bbde2b 3766 NEXT_INSN (after) = insn;
6d7dc5b9 3767 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
15bbde2b 3768 {
3769 rtx sequence = PATTERN (after);
3770 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3771 }
3772}
3773
312de84d 3774/* Add INSN into the doubly-linked list before insn BEFORE. This and
3072d30e 3775 the previous should be the only functions called to insert an insn
3776 once delay slots have been filled since only they know how to
3777 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3778 bb from before. */
312de84d 3779
3780void
3072d30e 3781add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 3782{
3783 rtx prev = PREV_INSN (before);
3784
611234b4 3785 gcc_assert (!optimize || !INSN_DELETED_P (before));
f65c10c0 3786
312de84d 3787 PREV_INSN (insn) = prev;
3788 NEXT_INSN (insn) = before;
3789
3790 if (prev)
3791 {
3792 NEXT_INSN (prev) = insn;
6d7dc5b9 3793 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
312de84d 3794 {
3795 rtx sequence = PATTERN (prev);
3796 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3797 }
3798 }
3799 else if (first_insn == before)
3800 first_insn = insn;
3801 else
3802 {
0a893c29 3803 struct sequence_stack *stack = seq_stack;
312de84d 3804 /* Scan all pending sequences too. */
3805 for (; stack; stack = stack->next)
3806 if (before == stack->first)
398f4855 3807 {
3808 stack->first = insn;
3809 break;
3810 }
312de84d 3811
611234b4 3812 gcc_assert (stack);
312de84d 3813 }
3814
3072d30e 3815 if (!bb
3816 && !BARRIER_P (before)
3817 && !BARRIER_P (insn))
3818 bb = BLOCK_FOR_INSN (before);
3819
3820 if (bb)
9dda7915 3821 {
3822 set_block_for_insn (insn, bb);
308f9b79 3823 if (INSN_P (insn))
3072d30e 3824 df_insn_rescan (insn);
611234b4 3825 /* Should not happen as first in the BB is always either NOTE or
ba821eb1 3826 LABEL. */
611234b4 3827 gcc_assert (BB_HEAD (bb) != insn
3828 /* Avoid clobbering of structure when creating new BB. */
3829 || BARRIER_P (insn)
ad4583d9 3830 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 3831 }
3832
312de84d 3833 PREV_INSN (before) = insn;
6d7dc5b9 3834 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
312de84d 3835 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3836}
3837
3072d30e 3838
3839/* Replace insn with an deleted instruction note. */
3840
fc3d1695 3841void
3842set_insn_deleted (rtx insn)
3072d30e 3843{
3844 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3845 PUT_CODE (insn, NOTE);
3846 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3847}
3848
3849
7ddcf2bf 3850/* Remove an insn from its doubly-linked list. This function knows how
3851 to handle sequences. */
3852void
35cb5232 3853remove_insn (rtx insn)
7ddcf2bf 3854{
3855 rtx next = NEXT_INSN (insn);
3856 rtx prev = PREV_INSN (insn);
e4bf866d 3857 basic_block bb;
3858
3072d30e 3859 /* Later in the code, the block will be marked dirty. */
3860 df_insn_delete (NULL, INSN_UID (insn));
3861
7ddcf2bf 3862 if (prev)
3863 {
3864 NEXT_INSN (prev) = next;
6d7dc5b9 3865 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 3866 {
3867 rtx sequence = PATTERN (prev);
3868 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3869 }
3870 }
3871 else if (first_insn == insn)
3872 first_insn = next;
3873 else
3874 {
0a893c29 3875 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3876 /* Scan all pending sequences too. */
3877 for (; stack; stack = stack->next)
3878 if (insn == stack->first)
3879 {
3880 stack->first = next;
3881 break;
3882 }
3883
611234b4 3884 gcc_assert (stack);
7ddcf2bf 3885 }
3886
3887 if (next)
3888 {
3889 PREV_INSN (next) = prev;
6d7dc5b9 3890 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 3891 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3892 }
3893 else if (last_insn == insn)
3894 last_insn = prev;
3895 else
3896 {
0a893c29 3897 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3898 /* Scan all pending sequences too. */
3899 for (; stack; stack = stack->next)
3900 if (insn == stack->last)
3901 {
3902 stack->last = prev;
3903 break;
3904 }
3905
611234b4 3906 gcc_assert (stack);
7ddcf2bf 3907 }
6d7dc5b9 3908 if (!BARRIER_P (insn)
e4bf866d 3909 && (bb = BLOCK_FOR_INSN (insn)))
3910 {
308f9b79 3911 if (INSN_P (insn))
3072d30e 3912 df_set_bb_dirty (bb);
5496dbfc 3913 if (BB_HEAD (bb) == insn)
e4bf866d 3914 {
f4aee538 3915 /* Never ever delete the basic block note without deleting whole
3916 basic block. */
611234b4 3917 gcc_assert (!NOTE_P (insn));
5496dbfc 3918 BB_HEAD (bb) = next;
e4bf866d 3919 }
5496dbfc 3920 if (BB_END (bb) == insn)
3921 BB_END (bb) = prev;
e4bf866d 3922 }
7ddcf2bf 3923}
3924
d5f9786f 3925/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3926
3927void
35cb5232 3928add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 3929{
611234b4 3930 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 3931
3932 /* Put the register usage information on the CALL. If there is already
3933 some usage information, put ours at the end. */
3934 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3935 {
3936 rtx link;
3937
3938 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3939 link = XEXP (link, 1))
3940 ;
3941
3942 XEXP (link, 1) = call_fusage;
3943 }
3944 else
3945 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3946}
3947
15bbde2b 3948/* Delete all insns made since FROM.
3949 FROM becomes the new last instruction. */
3950
3951void
35cb5232 3952delete_insns_since (rtx from)
15bbde2b 3953{
3954 if (from == 0)
3955 first_insn = 0;
3956 else
3957 NEXT_INSN (from) = 0;
3958 last_insn = from;
3959}
3960
34e2ddcd 3961/* This function is deprecated, please use sequences instead.
3962
3963 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 3964 The insns to be moved are those between FROM and TO.
3965 They are moved to a new position after the insn AFTER.
3966 AFTER must not be FROM or TO or any insn in between.
3967
3968 This function does not know about SEQUENCEs and hence should not be
3969 called after delay-slot filling has been done. */
3970
3971void
35cb5232 3972reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 3973{
3974 /* Splice this bunch out of where it is now. */
3975 if (PREV_INSN (from))
3976 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3977 if (NEXT_INSN (to))
3978 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3979 if (last_insn == to)
3980 last_insn = PREV_INSN (from);
3981 if (first_insn == from)
3982 first_insn = NEXT_INSN (to);
3983
3984 /* Make the new neighbors point to it and it to them. */
3985 if (NEXT_INSN (after))
3986 PREV_INSN (NEXT_INSN (after)) = to;
3987
3988 NEXT_INSN (to) = NEXT_INSN (after);
3989 PREV_INSN (from) = after;
3990 NEXT_INSN (after) = from;
3991 if (after == last_insn)
3992 last_insn = to;
3993}
3994
9dda7915 3995/* Same as function above, but take care to update BB boundaries. */
3996void
35cb5232 3997reorder_insns (rtx from, rtx to, rtx after)
9dda7915 3998{
3999 rtx prev = PREV_INSN (from);
4000 basic_block bb, bb2;
4001
4002 reorder_insns_nobb (from, to, after);
4003
6d7dc5b9 4004 if (!BARRIER_P (after)
9dda7915 4005 && (bb = BLOCK_FOR_INSN (after)))
4006 {
4007 rtx x;
3072d30e 4008 df_set_bb_dirty (bb);
d4c5e26d 4009
6d7dc5b9 4010 if (!BARRIER_P (from)
9dda7915 4011 && (bb2 = BLOCK_FOR_INSN (from)))
4012 {
5496dbfc 4013 if (BB_END (bb2) == to)
4014 BB_END (bb2) = prev;
3072d30e 4015 df_set_bb_dirty (bb2);
9dda7915 4016 }
4017
5496dbfc 4018 if (BB_END (bb) == after)
4019 BB_END (bb) = to;
9dda7915 4020
4021 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4022 if (!BARRIER_P (x))
a2bdd643 4023 df_insn_change_bb (x, bb);
9dda7915 4024 }
4025}
4026
15bbde2b 4027\f
31d3e01c 4028/* Emit insn(s) of given code and pattern
4029 at a specified place within the doubly-linked list.
15bbde2b 4030
31d3e01c 4031 All of the emit_foo global entry points accept an object
4032 X which is either an insn list or a PATTERN of a single
4033 instruction.
15bbde2b 4034
31d3e01c 4035 There are thus a few canonical ways to generate code and
4036 emit it at a specific place in the instruction stream. For
4037 example, consider the instruction named SPOT and the fact that
4038 we would like to emit some instructions before SPOT. We might
4039 do it like this:
15bbde2b 4040
31d3e01c 4041 start_sequence ();
4042 ... emit the new instructions ...
4043 insns_head = get_insns ();
4044 end_sequence ();
15bbde2b 4045
31d3e01c 4046 emit_insn_before (insns_head, SPOT);
15bbde2b 4047
31d3e01c 4048 It used to be common to generate SEQUENCE rtl instead, but that
4049 is a relic of the past which no longer occurs. The reason is that
4050 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4051 generated would almost certainly die right after it was created. */
15bbde2b 4052
31d3e01c 4053/* Make X be output before the instruction BEFORE. */
15bbde2b 4054
4055rtx
3072d30e 4056emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
15bbde2b 4057{
31d3e01c 4058 rtx last = before;
19cb6b50 4059 rtx insn;
15bbde2b 4060
611234b4 4061 gcc_assert (before);
31d3e01c 4062
4063 if (x == NULL_RTX)
4064 return last;
4065
4066 switch (GET_CODE (x))
15bbde2b 4067 {
9845d120 4068 case DEBUG_INSN:
31d3e01c 4069 case INSN:
4070 case JUMP_INSN:
4071 case CALL_INSN:
4072 case CODE_LABEL:
4073 case BARRIER:
4074 case NOTE:
4075 insn = x;
4076 while (insn)
4077 {
4078 rtx next = NEXT_INSN (insn);
3072d30e 4079 add_insn_before (insn, before, bb);
31d3e01c 4080 last = insn;
4081 insn = next;
4082 }
4083 break;
4084
4085#ifdef ENABLE_RTL_CHECKING
4086 case SEQUENCE:
611234b4 4087 gcc_unreachable ();
31d3e01c 4088 break;
4089#endif
4090
4091 default:
4092 last = make_insn_raw (x);
3072d30e 4093 add_insn_before (last, before, bb);
31d3e01c 4094 break;
15bbde2b 4095 }
4096
31d3e01c 4097 return last;
15bbde2b 4098}
4099
31d3e01c 4100/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4101 and output it before the instruction BEFORE. */
4102
4103rtx
0891f67c 4104emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4105{
d90b3d04 4106 rtx insn, last = NULL_RTX;
6e911104 4107
611234b4 4108 gcc_assert (before);
31d3e01c 4109
4110 switch (GET_CODE (x))
6e911104 4111 {
9845d120 4112 case DEBUG_INSN:
31d3e01c 4113 case INSN:
4114 case JUMP_INSN:
4115 case CALL_INSN:
4116 case CODE_LABEL:
4117 case BARRIER:
4118 case NOTE:
4119 insn = x;
4120 while (insn)
4121 {
4122 rtx next = NEXT_INSN (insn);
3072d30e 4123 add_insn_before (insn, before, NULL);
31d3e01c 4124 last = insn;
4125 insn = next;
4126 }
4127 break;
4128
4129#ifdef ENABLE_RTL_CHECKING
4130 case SEQUENCE:
611234b4 4131 gcc_unreachable ();
31d3e01c 4132 break;
4133#endif
4134
4135 default:
4136 last = make_jump_insn_raw (x);
3072d30e 4137 add_insn_before (last, before, NULL);
31d3e01c 4138 break;
6e911104 4139 }
4140
31d3e01c 4141 return last;
15bbde2b 4142}
4143
31d3e01c 4144/* Make an instruction with body X and code CALL_INSN
cd0fe062 4145 and output it before the instruction BEFORE. */
4146
4147rtx
0891f67c 4148emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4149{
d90b3d04 4150 rtx last = NULL_RTX, insn;
cd0fe062 4151
611234b4 4152 gcc_assert (before);
31d3e01c 4153
4154 switch (GET_CODE (x))
cd0fe062 4155 {
9845d120 4156 case DEBUG_INSN:
31d3e01c 4157 case INSN:
4158 case JUMP_INSN:
4159 case CALL_INSN:
4160 case CODE_LABEL:
4161 case BARRIER:
4162 case NOTE:
4163 insn = x;
4164 while (insn)
4165 {
4166 rtx next = NEXT_INSN (insn);
3072d30e 4167 add_insn_before (insn, before, NULL);
31d3e01c 4168 last = insn;
4169 insn = next;
4170 }
4171 break;
4172
4173#ifdef ENABLE_RTL_CHECKING
4174 case SEQUENCE:
611234b4 4175 gcc_unreachable ();
31d3e01c 4176 break;
4177#endif
4178
4179 default:
4180 last = make_call_insn_raw (x);
3072d30e 4181 add_insn_before (last, before, NULL);
31d3e01c 4182 break;
cd0fe062 4183 }
4184
31d3e01c 4185 return last;
cd0fe062 4186}
4187
9845d120 4188/* Make an instruction with body X and code DEBUG_INSN
4189 and output it before the instruction BEFORE. */
4190
4191rtx
4192emit_debug_insn_before_noloc (rtx x, rtx before)
4193{
4194 rtx last = NULL_RTX, insn;
4195
4196 gcc_assert (before);
4197
4198 switch (GET_CODE (x))
4199 {
4200 case DEBUG_INSN:
4201 case INSN:
4202 case JUMP_INSN:
4203 case CALL_INSN:
4204 case CODE_LABEL:
4205 case BARRIER:
4206 case NOTE:
4207 insn = x;
4208 while (insn)
4209 {
4210 rtx next = NEXT_INSN (insn);
4211 add_insn_before (insn, before, NULL);
4212 last = insn;
4213 insn = next;
4214 }
4215 break;
4216
4217#ifdef ENABLE_RTL_CHECKING
4218 case SEQUENCE:
4219 gcc_unreachable ();
4220 break;
4221#endif
4222
4223 default:
4224 last = make_debug_insn_raw (x);
4225 add_insn_before (last, before, NULL);
4226 break;
4227 }
4228
4229 return last;
4230}
4231
15bbde2b 4232/* Make an insn of code BARRIER
71caadc0 4233 and output it before the insn BEFORE. */
15bbde2b 4234
4235rtx
35cb5232 4236emit_barrier_before (rtx before)
15bbde2b 4237{
19cb6b50 4238 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4239
4240 INSN_UID (insn) = cur_insn_uid++;
4241
3072d30e 4242 add_insn_before (insn, before, NULL);
15bbde2b 4243 return insn;
4244}
4245
71caadc0 4246/* Emit the label LABEL before the insn BEFORE. */
4247
4248rtx
35cb5232 4249emit_label_before (rtx label, rtx before)
71caadc0 4250{
4251 /* This can be called twice for the same label as a result of the
4252 confusion that follows a syntax error! So make it harmless. */
4253 if (INSN_UID (label) == 0)
4254 {
4255 INSN_UID (label) = cur_insn_uid++;
3072d30e 4256 add_insn_before (label, before, NULL);
71caadc0 4257 }
4258
4259 return label;
4260}
4261
15bbde2b 4262/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4263
4264rtx
ad4583d9 4265emit_note_before (enum insn_note subtype, rtx before)
15bbde2b 4266{
19cb6b50 4267 rtx note = rtx_alloc (NOTE);
15bbde2b 4268 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4269 NOTE_KIND (note) = subtype;
ab87d1bc 4270 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4271 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
15bbde2b 4272
3072d30e 4273 add_insn_before (note, before, NULL);
15bbde2b 4274 return note;
4275}
4276\f
31d3e01c 4277/* Helper for emit_insn_after, handles lists of instructions
4278 efficiently. */
15bbde2b 4279
31d3e01c 4280static rtx
3072d30e 4281emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4282{
31d3e01c 4283 rtx last;
4284 rtx after_after;
3072d30e 4285 if (!bb && !BARRIER_P (after))
4286 bb = BLOCK_FOR_INSN (after);
15bbde2b 4287
3072d30e 4288 if (bb)
15bbde2b 4289 {
3072d30e 4290 df_set_bb_dirty (bb);
31d3e01c 4291 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4292 if (!BARRIER_P (last))
3072d30e 4293 {
4294 set_block_for_insn (last, bb);
4295 df_insn_rescan (last);
4296 }
6d7dc5b9 4297 if (!BARRIER_P (last))
3072d30e 4298 {
4299 set_block_for_insn (last, bb);
4300 df_insn_rescan (last);
4301 }
5496dbfc 4302 if (BB_END (bb) == after)
4303 BB_END (bb) = last;
15bbde2b 4304 }
4305 else
31d3e01c 4306 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4307 continue;
4308
4309 after_after = NEXT_INSN (after);
4310
4311 NEXT_INSN (after) = first;
4312 PREV_INSN (first) = after;
4313 NEXT_INSN (last) = after_after;
4314 if (after_after)
4315 PREV_INSN (after_after) = last;
4316
4317 if (after == last_insn)
4318 last_insn = last;
e1ab7874 4319
31d3e01c 4320 return last;
4321}
4322
3072d30e 4323/* Make X be output after the insn AFTER and set the BB of insn. If
4324 BB is NULL, an attempt is made to infer the BB from AFTER. */
31d3e01c 4325
4326rtx
3072d30e 4327emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
31d3e01c 4328{
4329 rtx last = after;
4330
611234b4 4331 gcc_assert (after);
31d3e01c 4332
4333 if (x == NULL_RTX)
4334 return last;
4335
4336 switch (GET_CODE (x))
15bbde2b 4337 {
9845d120 4338 case DEBUG_INSN:
31d3e01c 4339 case INSN:
4340 case JUMP_INSN:
4341 case CALL_INSN:
4342 case CODE_LABEL:
4343 case BARRIER:
4344 case NOTE:
3072d30e 4345 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4346 break;
4347
4348#ifdef ENABLE_RTL_CHECKING
4349 case SEQUENCE:
611234b4 4350 gcc_unreachable ();
31d3e01c 4351 break;
4352#endif
4353
4354 default:
4355 last = make_insn_raw (x);
3072d30e 4356 add_insn_after (last, after, bb);
31d3e01c 4357 break;
15bbde2b 4358 }
4359
31d3e01c 4360 return last;
15bbde2b 4361}
4362
1bea98fb 4363
31d3e01c 4364/* Make an insn of code JUMP_INSN with body X
15bbde2b 4365 and output it after the insn AFTER. */
4366
4367rtx
0891f67c 4368emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4369{
31d3e01c 4370 rtx last;
15bbde2b 4371
611234b4 4372 gcc_assert (after);
31d3e01c 4373
4374 switch (GET_CODE (x))
15bbde2b 4375 {
9845d120 4376 case DEBUG_INSN:
31d3e01c 4377 case INSN:
4378 case JUMP_INSN:
4379 case CALL_INSN:
4380 case CODE_LABEL:
4381 case BARRIER:
4382 case NOTE:
3072d30e 4383 last = emit_insn_after_1 (x, after, NULL);
31d3e01c 4384 break;
4385
4386#ifdef ENABLE_RTL_CHECKING
4387 case SEQUENCE:
611234b4 4388 gcc_unreachable ();
31d3e01c 4389 break;
4390#endif
4391
4392 default:
4393 last = make_jump_insn_raw (x);
3072d30e 4394 add_insn_after (last, after, NULL);
31d3e01c 4395 break;
15bbde2b 4396 }
4397
31d3e01c 4398 return last;
4399}
4400
4401/* Make an instruction with body X and code CALL_INSN
4402 and output it after the instruction AFTER. */
4403
4404rtx
0891f67c 4405emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4406{
4407 rtx last;
4408
611234b4 4409 gcc_assert (after);
31d3e01c 4410
4411 switch (GET_CODE (x))
4412 {
9845d120 4413 case DEBUG_INSN:
31d3e01c 4414 case INSN:
4415 case JUMP_INSN:
4416 case CALL_INSN:
4417 case CODE_LABEL:
4418 case BARRIER:
4419 case NOTE:
3072d30e 4420 last = emit_insn_after_1 (x, after, NULL);
31d3e01c 4421 break;
4422
4423#ifdef ENABLE_RTL_CHECKING
4424 case SEQUENCE:
611234b4 4425 gcc_unreachable ();
31d3e01c 4426 break;
4427#endif
4428
4429 default:
4430 last = make_call_insn_raw (x);
3072d30e 4431 add_insn_after (last, after, NULL);
31d3e01c 4432 break;
4433 }
4434
4435 return last;
15bbde2b 4436}
4437
9845d120 4438/* Make an instruction with body X and code CALL_INSN
4439 and output it after the instruction AFTER. */
4440
4441rtx
4442emit_debug_insn_after_noloc (rtx x, rtx after)
4443{
4444 rtx last;
4445
4446 gcc_assert (after);
4447
4448 switch (GET_CODE (x))
4449 {
4450 case DEBUG_INSN:
4451 case INSN:
4452 case JUMP_INSN:
4453 case CALL_INSN:
4454 case CODE_LABEL:
4455 case BARRIER:
4456 case NOTE:
4457 last = emit_insn_after_1 (x, after, NULL);
4458 break;
4459
4460#ifdef ENABLE_RTL_CHECKING
4461 case SEQUENCE:
4462 gcc_unreachable ();
4463 break;
4464#endif
4465
4466 default:
4467 last = make_debug_insn_raw (x);
4468 add_insn_after (last, after, NULL);
4469 break;
4470 }
4471
4472 return last;
4473}
4474
15bbde2b 4475/* Make an insn of code BARRIER
4476 and output it after the insn AFTER. */
4477
4478rtx
35cb5232 4479emit_barrier_after (rtx after)
15bbde2b 4480{
19cb6b50 4481 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4482
4483 INSN_UID (insn) = cur_insn_uid++;
4484
3072d30e 4485 add_insn_after (insn, after, NULL);
15bbde2b 4486 return insn;
4487}
4488
4489/* Emit the label LABEL after the insn AFTER. */
4490
4491rtx
35cb5232 4492emit_label_after (rtx label, rtx after)
15bbde2b 4493{
4494 /* This can be called twice for the same label
4495 as a result of the confusion that follows a syntax error!
4496 So make it harmless. */
4497 if (INSN_UID (label) == 0)
4498 {
4499 INSN_UID (label) = cur_insn_uid++;
3072d30e 4500 add_insn_after (label, after, NULL);
15bbde2b 4501 }
4502
4503 return label;
4504}
4505
4506/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4507
4508rtx
ad4583d9 4509emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4510{
19cb6b50 4511 rtx note = rtx_alloc (NOTE);
15bbde2b 4512 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4513 NOTE_KIND (note) = subtype;
ab87d1bc 4514 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4515 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3072d30e 4516 add_insn_after (note, after, NULL);
15bbde2b 4517 return note;
4518}
15bbde2b 4519\f
0891f67c 4520/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4521rtx
35cb5232 4522emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4523{
3072d30e 4524 rtx last = emit_insn_after_noloc (pattern, after, NULL);
d321a68b 4525
0891f67c 4526 if (pattern == NULL_RTX || !loc)
ca154f3f 4527 return last;
4528
31d3e01c 4529 after = NEXT_INSN (after);
4530 while (1)
4531 {
0891f67c 4532 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4533 INSN_LOCATOR (after) = loc;
31d3e01c 4534 if (after == last)
4535 break;
4536 after = NEXT_INSN (after);
4537 }
d321a68b 4538 return last;
4539}
4540
0891f67c 4541/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4542rtx
4543emit_insn_after (rtx pattern, rtx after)
4544{
9845d120 4545 rtx prev = after;
4546
4547 while (DEBUG_INSN_P (prev))
4548 prev = PREV_INSN (prev);
4549
4550 if (INSN_P (prev))
4551 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4552 else
3072d30e 4553 return emit_insn_after_noloc (pattern, after, NULL);
0891f67c 4554}
4555
4556/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4557rtx
35cb5232 4558emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4559{
0891f67c 4560 rtx last = emit_jump_insn_after_noloc (pattern, after);
31d3e01c 4561
0891f67c 4562 if (pattern == NULL_RTX || !loc)
ca154f3f 4563 return last;
4564
31d3e01c 4565 after = NEXT_INSN (after);
4566 while (1)
4567 {
0891f67c 4568 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4569 INSN_LOCATOR (after) = loc;
31d3e01c 4570 if (after == last)
4571 break;
4572 after = NEXT_INSN (after);
4573 }
d321a68b 4574 return last;
4575}
4576
0891f67c 4577/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4578rtx
4579emit_jump_insn_after (rtx pattern, rtx after)
4580{
9845d120 4581 rtx prev = after;
4582
4583 while (DEBUG_INSN_P (prev))
4584 prev = PREV_INSN (prev);
4585
4586 if (INSN_P (prev))
4587 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4588 else
4589 return emit_jump_insn_after_noloc (pattern, after);
4590}
4591
4592/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4593rtx
35cb5232 4594emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4595{
0891f67c 4596 rtx last = emit_call_insn_after_noloc (pattern, after);
31d3e01c 4597
0891f67c 4598 if (pattern == NULL_RTX || !loc)
ca154f3f 4599 return last;
4600
31d3e01c 4601 after = NEXT_INSN (after);
4602 while (1)
4603 {
0891f67c 4604 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4605 INSN_LOCATOR (after) = loc;
31d3e01c 4606 if (after == last)
4607 break;
4608 after = NEXT_INSN (after);
4609 }
d321a68b 4610 return last;
4611}
4612
0891f67c 4613/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4614rtx
4615emit_call_insn_after (rtx pattern, rtx after)
4616{
9845d120 4617 rtx prev = after;
4618
4619 while (DEBUG_INSN_P (prev))
4620 prev = PREV_INSN (prev);
4621
4622 if (INSN_P (prev))
4623 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4624 else
4625 return emit_call_insn_after_noloc (pattern, after);
4626}
4627
9845d120 4628/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4629rtx
4630emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4631{
4632 rtx last = emit_debug_insn_after_noloc (pattern, after);
4633
4634 if (pattern == NULL_RTX || !loc)
4635 return last;
4636
4637 after = NEXT_INSN (after);
4638 while (1)
4639 {
4640 if (active_insn_p (after) && !INSN_LOCATOR (after))
4641 INSN_LOCATOR (after) = loc;
4642 if (after == last)
4643 break;
4644 after = NEXT_INSN (after);
4645 }
4646 return last;
4647}
4648
4649/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4650rtx
4651emit_debug_insn_after (rtx pattern, rtx after)
4652{
4653 if (INSN_P (after))
4654 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4655 else
4656 return emit_debug_insn_after_noloc (pattern, after);
4657}
4658
0891f67c 4659/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4660rtx
35cb5232 4661emit_insn_before_setloc (rtx pattern, rtx before, int loc)
d321a68b 4662{
4663 rtx first = PREV_INSN (before);
3072d30e 4664 rtx last = emit_insn_before_noloc (pattern, before, NULL);
0891f67c 4665
4666 if (pattern == NULL_RTX || !loc)
4667 return last;
4668
4486418e 4669 if (!first)
4670 first = get_insns ();
4671 else
4672 first = NEXT_INSN (first);
0891f67c 4673 while (1)
4674 {
4675 if (active_insn_p (first) && !INSN_LOCATOR (first))
4676 INSN_LOCATOR (first) = loc;
4677 if (first == last)
4678 break;
4679 first = NEXT_INSN (first);
4680 }
4681 return last;
4682}
4683
4684/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4685rtx
4686emit_insn_before (rtx pattern, rtx before)
4687{
9845d120 4688 rtx next = before;
4689
4690 while (DEBUG_INSN_P (next))
4691 next = PREV_INSN (next);
4692
4693 if (INSN_P (next))
4694 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4695 else
3072d30e 4696 return emit_insn_before_noloc (pattern, before, NULL);
0891f67c 4697}
4698
4699/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4700rtx
4701emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4702{
4703 rtx first = PREV_INSN (before);
4704 rtx last = emit_jump_insn_before_noloc (pattern, before);
4705
4706 if (pattern == NULL_RTX)
4707 return last;
4708
4709 first = NEXT_INSN (first);
4710 while (1)
4711 {
4712 if (active_insn_p (first) && !INSN_LOCATOR (first))
4713 INSN_LOCATOR (first) = loc;
4714 if (first == last)
4715 break;
4716 first = NEXT_INSN (first);
4717 }
4718 return last;
4719}
4720
4721/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4722rtx
4723emit_jump_insn_before (rtx pattern, rtx before)
4724{
9845d120 4725 rtx next = before;
4726
4727 while (DEBUG_INSN_P (next))
4728 next = PREV_INSN (next);
4729
4730 if (INSN_P (next))
4731 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4732 else
4733 return emit_jump_insn_before_noloc (pattern, before);
4734}
4735
4736/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4737rtx
4738emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4739{
4740 rtx first = PREV_INSN (before);
4741 rtx last = emit_call_insn_before_noloc (pattern, before);
d321a68b 4742
ca154f3f 4743 if (pattern == NULL_RTX)
4744 return last;
4745
31d3e01c 4746 first = NEXT_INSN (first);
4747 while (1)
4748 {
0891f67c 4749 if (active_insn_p (first) && !INSN_LOCATOR (first))
13751393 4750 INSN_LOCATOR (first) = loc;
31d3e01c 4751 if (first == last)
4752 break;
4753 first = NEXT_INSN (first);
4754 }
d321a68b 4755 return last;
4756}
0891f67c 4757
4758/* like emit_call_insn_before_noloc,
4759 but set insn_locator according to before. */
4760rtx
4761emit_call_insn_before (rtx pattern, rtx before)
4762{
9845d120 4763 rtx next = before;
4764
4765 while (DEBUG_INSN_P (next))
4766 next = PREV_INSN (next);
4767
4768 if (INSN_P (next))
4769 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4770 else
4771 return emit_call_insn_before_noloc (pattern, before);
4772}
9845d120 4773
4774/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4775rtx
4776emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4777{
4778 rtx first = PREV_INSN (before);
4779 rtx last = emit_debug_insn_before_noloc (pattern, before);
4780
4781 if (pattern == NULL_RTX)
4782 return last;
4783
4784 first = NEXT_INSN (first);
4785 while (1)
4786 {
4787 if (active_insn_p (first) && !INSN_LOCATOR (first))
4788 INSN_LOCATOR (first) = loc;
4789 if (first == last)
4790 break;
4791 first = NEXT_INSN (first);
4792 }
4793 return last;
4794}
4795
4796/* like emit_debug_insn_before_noloc,
4797 but set insn_locator according to before. */
4798rtx
4799emit_debug_insn_before (rtx pattern, rtx before)
4800{
4801 if (INSN_P (before))
4802 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4803 else
4804 return emit_debug_insn_before_noloc (pattern, before);
4805}
d321a68b 4806\f
31d3e01c 4807/* Take X and emit it at the end of the doubly-linked
4808 INSN list.
15bbde2b 4809
4810 Returns the last insn emitted. */
4811
4812rtx
35cb5232 4813emit_insn (rtx x)
15bbde2b 4814{
31d3e01c 4815 rtx last = last_insn;
4816 rtx insn;
15bbde2b 4817
31d3e01c 4818 if (x == NULL_RTX)
4819 return last;
15bbde2b 4820
31d3e01c 4821 switch (GET_CODE (x))
4822 {
9845d120 4823 case DEBUG_INSN:
31d3e01c 4824 case INSN:
4825 case JUMP_INSN:
4826 case CALL_INSN:
4827 case CODE_LABEL:
4828 case BARRIER:
4829 case NOTE:
4830 insn = x;
4831 while (insn)
15bbde2b 4832 {
31d3e01c 4833 rtx next = NEXT_INSN (insn);
15bbde2b 4834 add_insn (insn);
31d3e01c 4835 last = insn;
4836 insn = next;
15bbde2b 4837 }
31d3e01c 4838 break;
15bbde2b 4839
31d3e01c 4840#ifdef ENABLE_RTL_CHECKING
4841 case SEQUENCE:
611234b4 4842 gcc_unreachable ();
31d3e01c 4843 break;
4844#endif
15bbde2b 4845
31d3e01c 4846 default:
4847 last = make_insn_raw (x);
4848 add_insn (last);
4849 break;
15bbde2b 4850 }
4851
4852 return last;
4853}
4854
9845d120 4855/* Make an insn of code DEBUG_INSN with pattern X
4856 and add it to the end of the doubly-linked list. */
4857
4858rtx
4859emit_debug_insn (rtx x)
4860{
4861 rtx last = last_insn;
4862 rtx insn;
4863
4864 if (x == NULL_RTX)
4865 return last;
4866
4867 switch (GET_CODE (x))
4868 {
4869 case DEBUG_INSN:
4870 case INSN:
4871 case JUMP_INSN:
4872 case CALL_INSN:
4873 case CODE_LABEL:
4874 case BARRIER:
4875 case NOTE:
4876 insn = x;
4877 while (insn)
4878 {
4879 rtx next = NEXT_INSN (insn);
4880 add_insn (insn);
4881 last = insn;
4882 insn = next;
4883 }
4884 break;
4885
4886#ifdef ENABLE_RTL_CHECKING
4887 case SEQUENCE:
4888 gcc_unreachable ();
4889 break;
4890#endif
4891
4892 default:
4893 last = make_debug_insn_raw (x);
4894 add_insn (last);
4895 break;
4896 }
4897
4898 return last;
4899}
4900
31d3e01c 4901/* Make an insn of code JUMP_INSN with pattern X
4902 and add it to the end of the doubly-linked list. */
15bbde2b 4903
4904rtx
35cb5232 4905emit_jump_insn (rtx x)
15bbde2b 4906{
d90b3d04 4907 rtx last = NULL_RTX, insn;
15bbde2b 4908
31d3e01c 4909 switch (GET_CODE (x))
15bbde2b 4910 {
9845d120 4911 case DEBUG_INSN:
31d3e01c 4912 case INSN:
4913 case JUMP_INSN:
4914 case CALL_INSN:
4915 case CODE_LABEL:
4916 case BARRIER:
4917 case NOTE:
4918 insn = x;
4919 while (insn)
4920 {
4921 rtx next = NEXT_INSN (insn);
4922 add_insn (insn);
4923 last = insn;
4924 insn = next;
4925 }
4926 break;
b36b07d8 4927
31d3e01c 4928#ifdef ENABLE_RTL_CHECKING
4929 case SEQUENCE:
611234b4 4930 gcc_unreachable ();
31d3e01c 4931 break;
4932#endif
b36b07d8 4933
31d3e01c 4934 default:
4935 last = make_jump_insn_raw (x);
4936 add_insn (last);
4937 break;
9dda7915 4938 }
b36b07d8 4939
4940 return last;
4941}
4942
31d3e01c 4943/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4944 and add it to the end of the doubly-linked list. */
4945
4946rtx
35cb5232 4947emit_call_insn (rtx x)
15bbde2b 4948{
31d3e01c 4949 rtx insn;
4950
4951 switch (GET_CODE (x))
15bbde2b 4952 {
9845d120 4953 case DEBUG_INSN:
31d3e01c 4954 case INSN:
4955 case JUMP_INSN:
4956 case CALL_INSN:
4957 case CODE_LABEL:
4958 case BARRIER:
4959 case NOTE:
4960 insn = emit_insn (x);
4961 break;
15bbde2b 4962
31d3e01c 4963#ifdef ENABLE_RTL_CHECKING
4964 case SEQUENCE:
611234b4 4965 gcc_unreachable ();
31d3e01c 4966 break;
4967#endif
15bbde2b 4968
31d3e01c 4969 default:
4970 insn = make_call_insn_raw (x);
15bbde2b 4971 add_insn (insn);
31d3e01c 4972 break;
15bbde2b 4973 }
31d3e01c 4974
4975 return insn;
15bbde2b 4976}
4977
4978/* Add the label LABEL to the end of the doubly-linked list. */
4979
4980rtx
35cb5232 4981emit_label (rtx label)
15bbde2b 4982{
4983 /* This can be called twice for the same label
4984 as a result of the confusion that follows a syntax error!
4985 So make it harmless. */
4986 if (INSN_UID (label) == 0)
4987 {
4988 INSN_UID (label) = cur_insn_uid++;
4989 add_insn (label);
4990 }
4991 return label;
4992}
4993
4994/* Make an insn of code BARRIER
4995 and add it to the end of the doubly-linked list. */
4996
4997rtx
35cb5232 4998emit_barrier (void)
15bbde2b 4999{
19cb6b50 5000 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 5001 INSN_UID (barrier) = cur_insn_uid++;
5002 add_insn (barrier);
5003 return barrier;
5004}
5005
2f57e3d9 5006/* Emit a copy of note ORIG. */
35cb5232 5007
2f57e3d9 5008rtx
5009emit_note_copy (rtx orig)
5010{
5011 rtx note;
5012
2f57e3d9 5013 note = rtx_alloc (NOTE);
5014
5015 INSN_UID (note) = cur_insn_uid++;
5016 NOTE_DATA (note) = NOTE_DATA (orig);
ad4583d9 5017 NOTE_KIND (note) = NOTE_KIND (orig);
2f57e3d9 5018 BLOCK_FOR_INSN (note) = NULL;
5019 add_insn (note);
5020
31b97e8f 5021 return note;
15bbde2b 5022}
5023
31b97e8f 5024/* Make an insn of code NOTE or type NOTE_NO
5025 and add it to the end of the doubly-linked list. */
15bbde2b 5026
5027rtx
ad4583d9 5028emit_note (enum insn_note kind)
15bbde2b 5029{
19cb6b50 5030 rtx note;
15bbde2b 5031
15bbde2b 5032 note = rtx_alloc (NOTE);
5033 INSN_UID (note) = cur_insn_uid++;
ad4583d9 5034 NOTE_KIND (note) = kind;
6c7786cb 5035 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ab87d1bc 5036 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 5037 add_insn (note);
5038 return note;
5039}
5040
18b42941 5041/* Emit a clobber of lvalue X. */
5042
5043rtx
5044emit_clobber (rtx x)
5045{
5046 /* CONCATs should not appear in the insn stream. */
5047 if (GET_CODE (x) == CONCAT)
5048 {
5049 emit_clobber (XEXP (x, 0));
5050 return emit_clobber (XEXP (x, 1));
5051 }
5052 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5053}
5054
5055/* Return a sequence of insns to clobber lvalue X. */
5056
5057rtx
5058gen_clobber (rtx x)
5059{
5060 rtx seq;
5061
5062 start_sequence ();
5063 emit_clobber (x);
5064 seq = get_insns ();
5065 end_sequence ();
5066 return seq;
5067}
5068
5069/* Emit a use of rvalue X. */
5070
5071rtx
5072emit_use (rtx x)
5073{
5074 /* CONCATs should not appear in the insn stream. */
5075 if (GET_CODE (x) == CONCAT)
5076 {
5077 emit_use (XEXP (x, 0));
5078 return emit_use (XEXP (x, 1));
5079 }
5080 return emit_insn (gen_rtx_USE (VOIDmode, x));
5081}
5082
5083/* Return a sequence of insns to use rvalue X. */
5084
5085rtx
5086gen_use (rtx x)
5087{
5088 rtx seq;
5089
5090 start_sequence ();
5091 emit_use (x);
5092 seq = get_insns ();
5093 end_sequence ();
5094 return seq;
5095}
5096
15bbde2b 5097/* Cause next statement to emit a line note even if the line number
bccd9980 5098 has not changed. */
15bbde2b 5099
5100void
35cb5232 5101force_next_line_note (void)
15bbde2b 5102{
7bd3dcc4 5103 last_location = -1;
15bbde2b 5104}
f1934a33 5105
5106/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5107 note of this type already exists, remove it first. */
f1934a33 5108
c080d8f0 5109rtx
35cb5232 5110set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5111{
5112 rtx note = find_reg_note (insn, kind, NULL_RTX);
5113
7e6224ab 5114 switch (kind)
5115 {
5116 case REG_EQUAL:
5117 case REG_EQUIV:
5118 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5119 has multiple sets (some callers assume single_set
5120 means the insn only has one set, when in fact it
5121 means the insn only has one * useful * set). */
5122 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5123 {
611234b4 5124 gcc_assert (!note);
7e6224ab 5125 return NULL_RTX;
5126 }
5127
5128 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5129 It serves no useful purpose and breaks eliminate_regs. */
5130 if (GET_CODE (datum) == ASM_OPERANDS)
5131 return NULL_RTX;
3072d30e 5132
5133 if (note)
5134 {
5135 XEXP (note, 0) = datum;
5136 df_notes_rescan (insn);
5137 return note;
5138 }
7e6224ab 5139 break;
5140
5141 default:
3072d30e 5142 if (note)
5143 {
5144 XEXP (note, 0) = datum;
5145 return note;
5146 }
7e6224ab 5147 break;
5148 }
c080d8f0 5149
a1ddb869 5150 add_reg_note (insn, kind, datum);
3072d30e 5151
5152 switch (kind)
c080d8f0 5153 {
3072d30e 5154 case REG_EQUAL:
5155 case REG_EQUIV:
5156 df_notes_rescan (insn);
5157 break;
5158 default:
5159 break;
c080d8f0 5160 }
f1934a33 5161
c080d8f0 5162 return REG_NOTES (insn);
f1934a33 5163}
15bbde2b 5164\f
5165/* Return an indication of which type of insn should have X as a body.
5166 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5167
9b69f75b 5168static enum rtx_code
35cb5232 5169classify_insn (rtx x)
15bbde2b 5170{
6d7dc5b9 5171 if (LABEL_P (x))
15bbde2b 5172 return CODE_LABEL;
5173 if (GET_CODE (x) == CALL)
5174 return CALL_INSN;
5175 if (GET_CODE (x) == RETURN)
5176 return JUMP_INSN;
5177 if (GET_CODE (x) == SET)
5178 {
5179 if (SET_DEST (x) == pc_rtx)
5180 return JUMP_INSN;
5181 else if (GET_CODE (SET_SRC (x)) == CALL)
5182 return CALL_INSN;
5183 else
5184 return INSN;
5185 }
5186 if (GET_CODE (x) == PARALLEL)
5187 {
19cb6b50 5188 int j;
15bbde2b 5189 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5190 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5191 return CALL_INSN;
5192 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5193 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5194 return JUMP_INSN;
5195 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5196 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5197 return CALL_INSN;
5198 }
5199 return INSN;
5200}
5201
5202/* Emit the rtl pattern X as an appropriate kind of insn.
5203 If X is a label, it is simply added into the insn chain. */
5204
5205rtx
35cb5232 5206emit (rtx x)
15bbde2b 5207{
5208 enum rtx_code code = classify_insn (x);
5209
611234b4 5210 switch (code)
15bbde2b 5211 {
611234b4 5212 case CODE_LABEL:
5213 return emit_label (x);
5214 case INSN:
5215 return emit_insn (x);
5216 case JUMP_INSN:
5217 {
5218 rtx insn = emit_jump_insn (x);
5219 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5220 return emit_barrier ();
5221 return insn;
5222 }
5223 case CALL_INSN:
5224 return emit_call_insn (x);
9845d120 5225 case DEBUG_INSN:
5226 return emit_debug_insn (x);
611234b4 5227 default:
5228 gcc_unreachable ();
15bbde2b 5229 }
15bbde2b 5230}
5231\f
1f3233d1 5232/* Space for free sequence stack entries. */
7035b2ab 5233static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5234
735f4358 5235/* Begin emitting insns to a sequence. If this sequence will contain
5236 something that might cause the compiler to pop arguments to function
5237 calls (because those pops have previously been deferred; see
5238 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5239 before calling this function. That will ensure that the deferred
5240 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5241
5242void
35cb5232 5243start_sequence (void)
15bbde2b 5244{
5245 struct sequence_stack *tem;
5246
1f3233d1 5247 if (free_sequence_stack != NULL)
5248 {
5249 tem = free_sequence_stack;
5250 free_sequence_stack = tem->next;
5251 }
5252 else
2457c754 5253 tem = GGC_NEW (struct sequence_stack);
15bbde2b 5254
0a893c29 5255 tem->next = seq_stack;
15bbde2b 5256 tem->first = first_insn;
5257 tem->last = last_insn;
5258
0a893c29 5259 seq_stack = tem;
15bbde2b 5260
5261 first_insn = 0;
5262 last_insn = 0;
5263}
5264
b49854c6 5265/* Set up the insn chain starting with FIRST as the current sequence,
5266 saving the previously current one. See the documentation for
5267 start_sequence for more information about how to use this function. */
15bbde2b 5268
5269void
35cb5232 5270push_to_sequence (rtx first)
15bbde2b 5271{
5272 rtx last;
5273
5274 start_sequence ();
5275
5276 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5277
5278 first_insn = first;
5279 last_insn = last;
5280}
5281
28bf151d 5282/* Like push_to_sequence, but take the last insn as an argument to avoid
5283 looping through the list. */
5284
5285void
5286push_to_sequence2 (rtx first, rtx last)
5287{
5288 start_sequence ();
5289
5290 first_insn = first;
5291 last_insn = last;
5292}
5293
ab74c92f 5294/* Set up the outer-level insn chain
5295 as the current sequence, saving the previously current one. */
5296
5297void
35cb5232 5298push_topmost_sequence (void)
ab74c92f 5299{
2041cfd9 5300 struct sequence_stack *stack, *top = NULL;
ab74c92f 5301
5302 start_sequence ();
5303
0a893c29 5304 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5305 top = stack;
5306
5307 first_insn = top->first;
5308 last_insn = top->last;
5309}
5310
5311/* After emitting to the outer-level insn chain, update the outer-level
5312 insn chain, and restore the previous saved state. */
5313
5314void
35cb5232 5315pop_topmost_sequence (void)
ab74c92f 5316{
2041cfd9 5317 struct sequence_stack *stack, *top = NULL;
ab74c92f 5318
0a893c29 5319 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5320 top = stack;
5321
5322 top->first = first_insn;
5323 top->last = last_insn;
5324
5325 end_sequence ();
5326}
5327
15bbde2b 5328/* After emitting to a sequence, restore previous saved state.
5329
b49854c6 5330 To get the contents of the sequence just made, you must call
31d3e01c 5331 `get_insns' *before* calling here.
b49854c6 5332
5333 If the compiler might have deferred popping arguments while
5334 generating this sequence, and this sequence will not be immediately
5335 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5336 before calling get_insns. That will ensure that the deferred
b49854c6 5337 pops are inserted into this sequence, and not into some random
5338 location in the instruction stream. See INHIBIT_DEFER_POP for more
5339 information about deferred popping of arguments. */
15bbde2b 5340
5341void
35cb5232 5342end_sequence (void)
15bbde2b 5343{
0a893c29 5344 struct sequence_stack *tem = seq_stack;
15bbde2b 5345
5346 first_insn = tem->first;
5347 last_insn = tem->last;
0a893c29 5348 seq_stack = tem->next;
15bbde2b 5349
1f3233d1 5350 memset (tem, 0, sizeof (*tem));
5351 tem->next = free_sequence_stack;
5352 free_sequence_stack = tem;
15bbde2b 5353}
5354
5355/* Return 1 if currently emitting into a sequence. */
5356
5357int
35cb5232 5358in_sequence_p (void)
15bbde2b 5359{
0a893c29 5360 return seq_stack != 0;
15bbde2b 5361}
15bbde2b 5362\f
02ebfa52 5363/* Put the various virtual registers into REGNO_REG_RTX. */
5364
2f3874ce 5365static void
b079a207 5366init_virtual_regs (void)
02ebfa52 5367{
b079a207 5368 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5369 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5370 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5371 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5372 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
0a893c29 5373}
5374
928d57e3 5375\f
5376/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5377static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5378static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5379static int copy_insn_n_scratches;
5380
5381/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5382 copied an ASM_OPERANDS.
5383 In that case, it is the original input-operand vector. */
5384static rtvec orig_asm_operands_vector;
5385
5386/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5387 copied an ASM_OPERANDS.
5388 In that case, it is the copied input-operand vector. */
5389static rtvec copy_asm_operands_vector;
5390
5391/* Likewise for the constraints vector. */
5392static rtvec orig_asm_constraints_vector;
5393static rtvec copy_asm_constraints_vector;
5394
5395/* Recursively create a new copy of an rtx for copy_insn.
5396 This function differs from copy_rtx in that it handles SCRATCHes and
5397 ASM_OPERANDs properly.
5398 Normally, this function is not used directly; use copy_insn as front end.
5399 However, you could first copy an insn pattern with copy_insn and then use
5400 this function afterwards to properly copy any REG_NOTEs containing
5401 SCRATCHes. */
5402
5403rtx
35cb5232 5404copy_insn_1 (rtx orig)
928d57e3 5405{
19cb6b50 5406 rtx copy;
5407 int i, j;
5408 RTX_CODE code;
5409 const char *format_ptr;
928d57e3 5410
25e880b1 5411 if (orig == NULL)
5412 return NULL;
5413
928d57e3 5414 code = GET_CODE (orig);
5415
5416 switch (code)
5417 {
5418 case REG:
928d57e3 5419 case CONST_INT:
5420 case CONST_DOUBLE:
e397ad8e 5421 case CONST_FIXED:
886cfd4f 5422 case CONST_VECTOR:
928d57e3 5423 case SYMBOL_REF:
5424 case CODE_LABEL:
5425 case PC:
5426 case CC0:
928d57e3 5427 return orig;
c09425a0 5428 case CLOBBER:
5429 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5430 return orig;
5431 break;
928d57e3 5432
5433 case SCRATCH:
5434 for (i = 0; i < copy_insn_n_scratches; i++)
5435 if (copy_insn_scratch_in[i] == orig)
5436 return copy_insn_scratch_out[i];
5437 break;
5438
5439 case CONST:
3072d30e 5440 if (shared_const_p (orig))
928d57e3 5441 return orig;
5442 break;
d823ba47 5443
928d57e3 5444 /* A MEM with a constant address is not sharable. The problem is that
5445 the constant address may need to be reloaded. If the mem is shared,
5446 then reloading one copy of this mem will cause all copies to appear
5447 to have been reloaded. */
5448
5449 default:
5450 break;
5451 }
5452
f2d0e9f1 5453 /* Copy the various flags, fields, and other information. We assume
5454 that all fields need copying, and then clear the fields that should
928d57e3 5455 not be copied. That is the sensible default behavior, and forces
5456 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5457 copy = shallow_copy_rtx (orig);
928d57e3 5458
5459 /* We do not copy the USED flag, which is used as a mark bit during
5460 walks over the RTL. */
7c25cb91 5461 RTX_FLAG (copy, used) = 0;
928d57e3 5462
5463 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5464 if (INSN_P (orig))
928d57e3 5465 {
7c25cb91 5466 RTX_FLAG (copy, jump) = 0;
5467 RTX_FLAG (copy, call) = 0;
5468 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5469 }
d823ba47 5470
928d57e3 5471 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5472
5473 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5474 switch (*format_ptr++)
5475 {
5476 case 'e':
5477 if (XEXP (orig, i) != NULL)
5478 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5479 break;
928d57e3 5480
f2d0e9f1 5481 case 'E':
5482 case 'V':
5483 if (XVEC (orig, i) == orig_asm_constraints_vector)
5484 XVEC (copy, i) = copy_asm_constraints_vector;
5485 else if (XVEC (orig, i) == orig_asm_operands_vector)
5486 XVEC (copy, i) = copy_asm_operands_vector;
5487 else if (XVEC (orig, i) != NULL)
5488 {
5489 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5490 for (j = 0; j < XVECLEN (copy, i); j++)
5491 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5492 }
5493 break;
928d57e3 5494
f2d0e9f1 5495 case 't':
5496 case 'w':
5497 case 'i':
5498 case 's':
5499 case 'S':
5500 case 'u':
5501 case '0':
5502 /* These are left unchanged. */
5503 break;
928d57e3 5504
f2d0e9f1 5505 default:
5506 gcc_unreachable ();
5507 }
928d57e3 5508
5509 if (code == SCRATCH)
5510 {
5511 i = copy_insn_n_scratches++;
611234b4 5512 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5513 copy_insn_scratch_in[i] = orig;
5514 copy_insn_scratch_out[i] = copy;
5515 }
5516 else if (code == ASM_OPERANDS)
5517 {
d91f2122 5518 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5519 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5520 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5521 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5522 }
5523
5524 return copy;
5525}
5526
5527/* Create a new copy of an rtx.
5528 This function differs from copy_rtx in that it handles SCRATCHes and
5529 ASM_OPERANDs properly.
5530 INSN doesn't really have to be a full INSN; it could be just the
5531 pattern. */
5532rtx
35cb5232 5533copy_insn (rtx insn)
928d57e3 5534{
5535 copy_insn_n_scratches = 0;
5536 orig_asm_operands_vector = 0;
5537 orig_asm_constraints_vector = 0;
5538 copy_asm_operands_vector = 0;
5539 copy_asm_constraints_vector = 0;
5540 return copy_insn_1 (insn);
5541}
02ebfa52 5542
15bbde2b 5543/* Initialize data structures and variables in this file
5544 before generating rtl for each function. */
5545
5546void
35cb5232 5547init_emit (void)
15bbde2b 5548{
15bbde2b 5549 first_insn = NULL;
5550 last_insn = NULL;
9845d120 5551 if (MIN_NONDEBUG_INSN_UID)
5552 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5553 else
5554 cur_insn_uid = 1;
5555 cur_debug_insn_uid = 1;
15bbde2b 5556 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
7bd3dcc4 5557 last_location = UNKNOWN_LOCATION;
15bbde2b 5558 first_label_num = label_num;
0a893c29 5559 seq_stack = NULL;
15bbde2b 5560
15bbde2b 5561 /* Init the tables that describe all the pseudo regs. */
5562
fd6ffb7c 5563 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5564
fd6ffb7c 5565 crtl->emit.regno_pointer_align
2457c754 5566 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5567
d823ba47 5568 regno_reg_rtx
2457c754 5569 = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);
fcdc122e 5570
936082bb 5571 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5572 memcpy (regno_reg_rtx,
5573 static_regno_reg_rtx,
5574 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5575
15bbde2b 5576 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5577 init_virtual_regs ();
888e0d33 5578
5579 /* Indicate that the virtual registers and stack locations are
5580 all pointers. */
e61a0a7f 5581 REG_POINTER (stack_pointer_rtx) = 1;
5582 REG_POINTER (frame_pointer_rtx) = 1;
5583 REG_POINTER (hard_frame_pointer_rtx) = 1;
5584 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5585
e61a0a7f 5586 REG_POINTER (virtual_incoming_args_rtx) = 1;
5587 REG_POINTER (virtual_stack_vars_rtx) = 1;
5588 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5589 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5590 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5591
d4c332ff 5592#ifdef STACK_BOUNDARY
80909c64 5593 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5594 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5595 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5596 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5597
5598 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5599 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5600 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5601 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5602 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5603#endif
5604
89525da0 5605#ifdef INIT_EXPANDERS
5606 INIT_EXPANDERS;
5607#endif
15bbde2b 5608}
5609
6e68dcb2 5610/* Generate a vector constant for mode MODE and constant value CONSTANT. */
886cfd4f 5611
5612static rtx
6e68dcb2 5613gen_const_vector (enum machine_mode mode, int constant)
886cfd4f 5614{
5615 rtx tem;
5616 rtvec v;
5617 int units, i;
5618 enum machine_mode inner;
5619
5620 units = GET_MODE_NUNITS (mode);
5621 inner = GET_MODE_INNER (mode);
5622
069b07bf 5623 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5624
886cfd4f 5625 v = rtvec_alloc (units);
5626
6e68dcb2 5627 /* We need to call this function after we set the scalar const_tiny_rtx
5628 entries. */
5629 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5630
5631 for (i = 0; i < units; ++i)
6e68dcb2 5632 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5633
9426b612 5634 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5635 return tem;
5636}
5637
9426b612 5638/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6e68dcb2 5639 all elements are zero, and the one vector when all elements are one. */
9426b612 5640rtx
35cb5232 5641gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
9426b612 5642{
6e68dcb2 5643 enum machine_mode inner = GET_MODE_INNER (mode);
5644 int nunits = GET_MODE_NUNITS (mode);
5645 rtx x;
9426b612 5646 int i;
5647
6e68dcb2 5648 /* Check to see if all of the elements have the same value. */
5649 x = RTVEC_ELT (v, nunits - 1);
5650 for (i = nunits - 2; i >= 0; i--)
5651 if (RTVEC_ELT (v, i) != x)
5652 break;
5653
5654 /* If the values are all the same, check to see if we can use one of the
5655 standard constant vectors. */
5656 if (i == -1)
5657 {
5658 if (x == CONST0_RTX (inner))
5659 return CONST0_RTX (mode);
5660 else if (x == CONST1_RTX (inner))
5661 return CONST1_RTX (mode);
5662 }
5663
5664 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5665}
5666
6d8b68a3 5667/* Initialise global register information required by all functions. */
5668
5669void
5670init_emit_regs (void)
5671{
5672 int i;
5673
5674 /* Reset register attributes */
5675 htab_empty (reg_attrs_htab);
5676
5677 /* We need reg_raw_mode, so initialize the modes now. */
5678 init_reg_modes_target ();
5679
5680 /* Assign register numbers to the globally defined register rtx. */
5681 pc_rtx = gen_rtx_PC (VOIDmode);
5682 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5683 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5684 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5685 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5686 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5687 virtual_incoming_args_rtx =
5688 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5689 virtual_stack_vars_rtx =
5690 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5691 virtual_stack_dynamic_rtx =
5692 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5693 virtual_outgoing_args_rtx =
5694 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5695 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5696
5697 /* Initialize RTL for commonly used hard registers. These are
5698 copied into regno_reg_rtx as we begin to compile each function. */
5699 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5700 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5701
5702#ifdef RETURN_ADDRESS_POINTER_REGNUM
5703 return_address_pointer_rtx
5704 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5705#endif
5706
6d8b68a3 5707 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5708 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5709 else
5710 pic_offset_table_rtx = NULL_RTX;
5711}
5712
01703575 5713/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5714
5715void
01703575 5716init_emit_once (void)
15bbde2b 5717{
5718 int i;
5719 enum machine_mode mode;
9e042f31 5720 enum machine_mode double_mode;
15bbde2b 5721
e397ad8e 5722 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5723 hash tables. */
573aba85 5724 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5725 const_int_htab_eq, NULL);
c6259b83 5726
573aba85 5727 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5728 const_double_htab_eq, NULL);
2ff23ed0 5729
e397ad8e 5730 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5731 const_fixed_htab_eq, NULL);
5732
573aba85 5733 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5734 mem_attrs_htab_eq, NULL);
ca74b940 5735 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5736 reg_attrs_htab_eq, NULL);
77695070 5737
71d7daa2 5738 /* Compute the word and byte modes. */
5739
5740 byte_mode = VOIDmode;
5741 word_mode = VOIDmode;
5742 double_mode = VOIDmode;
5743
069b07bf 5744 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5745 mode != VOIDmode;
71d7daa2 5746 mode = GET_MODE_WIDER_MODE (mode))
5747 {
5748 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5749 && byte_mode == VOIDmode)
5750 byte_mode = mode;
5751
5752 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5753 && word_mode == VOIDmode)
5754 word_mode = mode;
5755 }
5756
069b07bf 5757 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5758 mode != VOIDmode;
71d7daa2 5759 mode = GET_MODE_WIDER_MODE (mode))
5760 {
5761 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5762 && double_mode == VOIDmode)
5763 double_mode = mode;
5764 }
5765
5766 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5767
57c097d5 5768#ifdef INIT_EXPANDERS
ab5beff9 5769 /* This is to initialize {init|mark|free}_machine_status before the first
5770 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5771 end which calls push_function_context_to before the first call to
57c097d5 5772 init_function_start. */
5773 INIT_EXPANDERS;
5774#endif
5775
15bbde2b 5776 /* Create the unique rtx's for certain rtx codes and operand values. */
5777
8fd5918e 5778 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
7014838c 5779 tries to use these variables. */
15bbde2b 5780 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5781 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5782 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5783
1a60f06a 5784 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5785 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5786 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5787 else
3ad7bb1c 5788 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5789
2ff23ed0 5790 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5791 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5792 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
3fa759a9 5793
5794 dconstm1 = dconst1;
5795 dconstm1.sign = 1;
77e89269 5796
5797 dconsthalf = dconst1;
9d96125b 5798 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5799
8918c507 5800 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
15bbde2b 5801 {
3fa759a9 5802 const REAL_VALUE_TYPE *const r =
badfe841 5803 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5804
069b07bf 5805 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5806 mode != VOIDmode;
5807 mode = GET_MODE_WIDER_MODE (mode))
5808 const_tiny_rtx[i][(int) mode] =
5809 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5810
5811 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5812 mode != VOIDmode;
15bbde2b 5813 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5814 const_tiny_rtx[i][(int) mode] =
5815 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
15bbde2b 5816
b572011e 5817 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5818
069b07bf 5819 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5820 mode != VOIDmode;
15bbde2b 5821 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5822 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5823
5824 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5825 mode != VOIDmode;
5826 mode = GET_MODE_WIDER_MODE (mode))
5827 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5828 }
5829
4248fc32 5830 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5831 mode != VOIDmode;
5832 mode = GET_MODE_WIDER_MODE (mode))
5833 {
5834 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5835 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5836 }
5837
5838 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5839 mode != VOIDmode;
5840 mode = GET_MODE_WIDER_MODE (mode))
5841 {
5842 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5843 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5844 }
5845
886cfd4f 5846 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5847 mode != VOIDmode;
5848 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5849 {
5850 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5851 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5852 }
886cfd4f 5853
5854 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5855 mode != VOIDmode;
5856 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5857 {
5858 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5859 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5860 }
886cfd4f 5861
06f0b99c 5862 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5863 mode != VOIDmode;
5864 mode = GET_MODE_WIDER_MODE (mode))
5865 {
5866 FCONST0(mode).data.high = 0;
5867 FCONST0(mode).data.low = 0;
5868 FCONST0(mode).mode = mode;
e397ad8e 5869 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5870 FCONST0 (mode), mode);
06f0b99c 5871 }
5872
5873 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5874 mode != VOIDmode;
5875 mode = GET_MODE_WIDER_MODE (mode))
5876 {
5877 FCONST0(mode).data.high = 0;
5878 FCONST0(mode).data.low = 0;
5879 FCONST0(mode).mode = mode;
e397ad8e 5880 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5881 FCONST0 (mode), mode);
06f0b99c 5882 }
5883
5884 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5885 mode != VOIDmode;
5886 mode = GET_MODE_WIDER_MODE (mode))
5887 {
5888 FCONST0(mode).data.high = 0;
5889 FCONST0(mode).data.low = 0;
5890 FCONST0(mode).mode = mode;
e397ad8e 5891 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5892 FCONST0 (mode), mode);
06f0b99c 5893
5894 /* We store the value 1. */
5895 FCONST1(mode).data.high = 0;
5896 FCONST1(mode).data.low = 0;
5897 FCONST1(mode).mode = mode;
5898 lshift_double (1, 0, GET_MODE_FBIT (mode),
5899 2 * HOST_BITS_PER_WIDE_INT,
5900 &FCONST1(mode).data.low,
5901 &FCONST1(mode).data.high,
5902 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5903 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5904 FCONST1 (mode), mode);
06f0b99c 5905 }
5906
5907 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5908 mode != VOIDmode;
5909 mode = GET_MODE_WIDER_MODE (mode))
5910 {
5911 FCONST0(mode).data.high = 0;
5912 FCONST0(mode).data.low = 0;
5913 FCONST0(mode).mode = mode;
e397ad8e 5914 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5915 FCONST0 (mode), mode);
06f0b99c 5916
5917 /* We store the value 1. */
5918 FCONST1(mode).data.high = 0;
5919 FCONST1(mode).data.low = 0;
5920 FCONST1(mode).mode = mode;
5921 lshift_double (1, 0, GET_MODE_FBIT (mode),
5922 2 * HOST_BITS_PER_WIDE_INT,
5923 &FCONST1(mode).data.low,
5924 &FCONST1(mode).data.high,
5925 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5926 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5927 FCONST1 (mode), mode);
5928 }
5929
5930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5931 mode != VOIDmode;
5932 mode = GET_MODE_WIDER_MODE (mode))
5933 {
5934 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5935 }
5936
5937 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5938 mode != VOIDmode;
5939 mode = GET_MODE_WIDER_MODE (mode))
5940 {
5941 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5942 }
5943
5944 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5945 mode != VOIDmode;
5946 mode = GET_MODE_WIDER_MODE (mode))
5947 {
5948 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5949 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5950 }
5951
5952 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5953 mode != VOIDmode;
5954 mode = GET_MODE_WIDER_MODE (mode))
5955 {
5956 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5957 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 5958 }
5959
0fd4500a 5960 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5961 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5962 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 5963
065336b4 5964 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5965 if (STORE_FLAG_VALUE == 1)
5966 const_tiny_rtx[1][(int) BImode] = const1_rtx;
15bbde2b 5967}
ac6c481d 5968\f
cd0fe062 5969/* Produce exact duplicate of insn INSN after AFTER.
5970 Care updating of libcall regions if present. */
5971
5972rtx
35cb5232 5973emit_copy_of_insn_after (rtx insn, rtx after)
cd0fe062 5974{
9ce37fa7 5975 rtx new_rtx, link;
cd0fe062 5976
5977 switch (GET_CODE (insn))
5978 {
5979 case INSN:
9ce37fa7 5980 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5981 break;
5982
5983 case JUMP_INSN:
9ce37fa7 5984 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5985 break;
5986
9845d120 5987 case DEBUG_INSN:
5988 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5989 break;
5990
cd0fe062 5991 case CALL_INSN:
9ce37fa7 5992 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5993 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 5994 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 5995 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 5996 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5997 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5998 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5999 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 6000 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 6001 break;
6002
6003 default:
611234b4 6004 gcc_unreachable ();
cd0fe062 6005 }
6006
6007 /* Update LABEL_NUSES. */
9ce37fa7 6008 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 6009
9ce37fa7 6010 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
ab87d1bc 6011
98116afd 6012 /* If the old insn is frame related, then so is the new one. This is
6013 primarily needed for IA-64 unwind info which marks epilogue insns,
6014 which may be duplicated by the basic block reordering code. */
9ce37fa7 6015 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 6016
19d2fe05 6017 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6018 will make them. REG_LABEL_TARGETs are created there too, but are
6019 supposed to be sticky, so we copy them. */
cd0fe062 6020 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 6021 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 6022 {
6023 if (GET_CODE (link) == EXPR_LIST)
9ce37fa7 6024 add_reg_note (new_rtx, REG_NOTE_KIND (link),
a1ddb869 6025 copy_insn_1 (XEXP (link, 0)));
cd0fe062 6026 else
9ce37fa7 6027 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
cd0fe062 6028 }
6029
9ce37fa7 6030 INSN_CODE (new_rtx) = INSN_CODE (insn);
6031 return new_rtx;
cd0fe062 6032}
1f3233d1 6033
7035b2ab 6034static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 6035rtx
6036gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6037{
6038 if (hard_reg_clobbers[mode][regno])
6039 return hard_reg_clobbers[mode][regno];
6040 else
6041 return (hard_reg_clobbers[mode][regno] =
6042 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6043}
6044
1f3233d1 6045#include "gt-emit-rtl.h"