/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG whose attributes are DECL and OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

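/* Since every CONST_INT is shared, pointer comparison is a valid test
   for value equality.  An illustrative sketch (values hypothetical):

       rtx a = GEN_INT (123456);
       rtx b = GEN_INT (123456);
       gcc_assert (a == b);			  same hash table entry
       gcc_assert (GEN_INT (0) == const0_rtx);	  preallocated small value

   Values in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come straight
   from the const_int_rtx array; everything else goes through
   const_int_htab above.  */
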
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

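/* For example, assuming an 8-bit QImode, gen_int_mode (255, QImode)
   truncates and sign-extends to the mode and so yields constm1_rtx,
   whereas a bare GEN_INT (255) would produce a CONST_INT that is not
   canonical for QImode arithmetic.  */
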
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

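/* For instance, rtx_to_double_int (GEN_INT (-2)) produces a double_int
   with low == -2 and high == -1, because shwi_to_double_int
   sign-extends the single-word value into the high word.  */
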

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

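/* Making the cases above concrete (assuming a 64-bit HOST_WIDE_INT):
   immed_double_const (-1, -1, DImode) falls into case 1 and yields
   constm1_rtx, while immed_double_const (0, -1, TImode) really needs
   the high word and so builds a VOIDmode CONST_DOUBLE under case 3.  */
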
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

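/* Consequently gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) normally hands
   back the shared frame_pointer_rtx rather than a fresh rtx, so the
   elimination code described above can spot explicit frame-pointer
   references by simple pointer comparison.  */
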
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

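/* Some illustrative outcomes, assuming 4-byte words and little-endian
   byte order:

       (subreg:SI (reg:DI) 0)	valid: the lowpart word
       (subreg:SI (reg:DI) 4)	valid: via the word_mode allowance
       (subreg:QI (reg:SI) 0)	valid: the lowpart byte
       (subreg:QI (reg:SI) 3)	invalid: a subreg performs no arbitrary
				byte extraction
       (subreg:SI (reg:DF) 0)	tolerated only by the word_mode escape
				hatch flagged ??? above  */
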
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

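/* A typical use is assembling the body of a PARALLEL, e.g.

       rtvec v = gen_rtvec (2, set_rtx, clobber_rtx);
       rtx body = gen_rtx_PARALLEL (VOIDmode, v);

   where set_rtx and clobber_rtx stand for previously constructed rtxes
   (the names are illustrative only).  */
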
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
\f
15bbde2b 852/* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
854
855rtx
35cb5232 856gen_reg_rtx (enum machine_mode mode)
15bbde2b 857{
19cb6b50 858 rtx val;
27a7a23a 859 unsigned int align = GET_MODE_ALIGNMENT (mode);
15bbde2b 860
1b7ff857 861 gcc_assert (can_create_pseudo_p ());
15bbde2b 862
27a7a23a 863 /* If a virtual register with bigger mode alignment is generated,
864 increase stack alignment estimation because it might be spilled
865 to stack later. */
48e1416a 866 if (SUPPORTS_STACK_ALIGNMENT
27a7a23a 867 && crtl->stack_alignment_estimated < align
868 && !crtl->stack_realign_processed)
8645d3e7 869 {
870 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
871 if (crtl->stack_alignment_estimated < min_align)
872 crtl->stack_alignment_estimated = min_align;
873 }
27a7a23a 874
316bc009 875 if (generating_concat_p
876 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
877 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 878 {
879 /* For complex modes, don't make a single pseudo.
880 Instead, make a CONCAT of two pseudos.
881 This allows noncontiguous allocation of the real and imaginary parts,
882 which makes much better code. Besides, allocating DCmode
883 pseudos overstrains reload on some machines like the 386. */
884 rtx realpart, imagpart;
e9e12845 885 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 886
887 realpart = gen_reg_rtx (partmode);
888 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 889 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 890 }
891
ca74b940 892 /* Make sure regno_pointer_align, and regno_reg_rtx are large
fcdc122e 893 enough to have an element for this pseudo reg number. */
15bbde2b 894
fd6ffb7c 895 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
15bbde2b 896 {
fd6ffb7c 897 int old_size = crtl->emit.regno_pointer_align_length;
9ce37fa7 898 char *tmp;
fcdc122e 899 rtx *new1;
fcdc122e 900
9ce37fa7 901 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
902 memset (tmp + old_size, 0, old_size);
903 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
0a893c29 904
2457c754 905 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
0a893c29 906 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 907 regno_reg_rtx = new1;
908
fd6ffb7c 909 crtl->emit.regno_pointer_align_length = old_size * 2;
15bbde2b 910 }
911
22cf44bc 912 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 913 regno_reg_rtx[reg_rtx_no++] = val;
914 return val;
915}
916
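/* For instance, with generating_concat_p set, gen_reg_rtx (DCmode)
   does not allocate one wide pseudo but returns something like
   (concat:DC (reg:DF 100) (reg:DF 101)) (pseudo numbers hypothetical),
   so the real and imaginary parts can be allocated independently.  */
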
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
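
/* For example, gen_lowpart_common (QImode, (sign_extend:SI (reg:QI)))
   returns the inner (reg:QI) unchanged, while requesting an HImode
   lowpart of the same extension narrows it to
   (sign_extend:HI (reg:QI)).  */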
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

10ef59ac 1279
81802af6 1280unsigned int
35cb5232 1281subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
81802af6 1282{
1283 unsigned int offset = 0;
1284 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1285
81802af6 1286 if (difference > 0)
d56d0ca2 1287 {
81802af6 1288 if (WORDS_BIG_ENDIAN)
1289 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1290 if (BYTES_BIG_ENDIAN)
1291 offset += difference % UNITS_PER_WORD;
d56d0ca2 1292 }
701e46d0 1293
81802af6 1294 return offset;
d56d0ca2 1295}
64ab453f 1296
81802af6 1297/* Return offset in bytes to get OUTERMODE high part
1298 of the value in mode INNERMODE stored in memory in target format. */
1299unsigned int
35cb5232 1300subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
64ab453f 1301{
1302 unsigned int offset = 0;
1303 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1304
611234b4 1305 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
81802af6 1306
64ab453f 1307 if (difference > 0)
1308 {
81802af6 1309 if (! WORDS_BIG_ENDIAN)
64ab453f 1310 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1311 if (! BYTES_BIG_ENDIAN)
64ab453f 1312 offset += difference % UNITS_PER_WORD;
1313 }
1314
81802af6 1315 return offset;
64ab453f 1316}
d56d0ca2 1317
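/* Concretely, for OUTERMODE == SImode and INNERMODE == DImode on a
   target with 4-byte words, the difference is 4 bytes:
   subreg_lowpart_offset returns 4 on a big-endian target and 0 on a
   little-endian one, and subreg_highpart_offset returns the opposite
   value in each case.  */
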
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

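/* For example, on a 32-bit little-endian target, operand_subword on a
   DImode register with OFFSET 1 selects the high-order word as
   (subreg:SI (reg:DI) 4), while for a DImode MEM it instead offsets
   the address by 4 bytes via adjust_address_nv.  */
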
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
\f
/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (MAX (MEM_ALIGN (mem),
		  get_object_alignment (MEM_EXPR (mem), align))
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

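/* As an illustration, for a MEM whose MEM_EXPR is a COMPONENT_REF at a
   total byte offset of 6 within a decl having 16-byte DECL_ALIGN,
   get_mem_align_offset (mem, 32) returns 6 & 3 == 2; were the decl only
   2-byte aligned, the same request would return -1.  */
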
310b57a1 1534/* Given REF (a MEM) and T, either the type of X or the expression
c6259b83 1535 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1536 if we are making a new object of this type. BITPOS is nonzero if
1537 there is an offset outstanding on T that will be applied later. */
c6259b83 1538
1539void
35cb5232 1540set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1541 HOST_WIDE_INT bitpos)
c6259b83 1542{
32c2fdea 1543 alias_set_type alias = MEM_ALIAS_SET (ref);
b10dbbca 1544 tree expr = MEM_EXPR (ref);
2a631e19 1545 rtx offset = MEM_OFFSET (ref);
1546 rtx size = MEM_SIZE (ref);
1547 unsigned int align = MEM_ALIGN (ref);
6f717f77 1548 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1549 tree type;
1550
1551 /* It can happen that type_for_mode was given a mode for which there
1552 is no language-level type. In which case it returns NULL, which
1553 we can see here. */
1554 if (t == NULL_TREE)
1555 return;
1556
1557 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1558 if (type == error_mark_node)
1559 return;
c6259b83 1560
c6259b83 1561 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1562 wrong answer, as it assumes that DECL_RTL already has the right alias
1563 info. Callers should not set DECL_RTL until after the call to
1564 set_mem_attributes. */
611234b4 1565 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1566
96216d37 1567 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1568 front-end routine) and use it. */
1569 alias = get_alias_set (t);
c6259b83 1570
fbc6244b 1571 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
cfefc966 1572 MEM_IN_STRUCT_P (ref)
1573 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
8d350e69 1574 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1575
2a631e19 1576 /* If we are making an object of this type, or if this is a DECL, we know
1577 that it is a scalar if the type is not an aggregate. */
cfefc966 1578 if ((objectp || DECL_P (t))
1579 && ! AGGREGATE_TYPE_P (type)
1580 && TREE_CODE (type) != COMPLEX_TYPE)
c6259b83 1581 MEM_SCALAR_P (ref) = 1;
1582
a9d9ab08 1583 /* We can set the alignment from the type if we are making an object,
1584 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
679e0056 1585 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
a9d9ab08 1586 align = MAX (align, TYPE_ALIGN (type));
679e0056 1587
182cf5a9 1588 else if (TREE_CODE (t) == MEM_REF)
1589 {
679e0056 1590 tree op0 = TREE_OPERAND (t, 0);
06a807aa 1591 if (TREE_CODE (op0) == ADDR_EXPR
1592 && (DECL_P (TREE_OPERAND (op0, 0))
1593 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
182cf5a9 1594 {
06a807aa 1595 if (DECL_P (TREE_OPERAND (op0, 0)))
1596 align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1597 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1598 {
1599 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
182cf5a9 1600#ifdef CONSTANT_ALIGNMENT
06a807aa 1601 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
182cf5a9 1602#endif
06a807aa 1603 }
1604 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1605 {
1606 unsigned HOST_WIDE_INT ioff
1607 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1608 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1609 align = MIN (aoff, align);
1610 }
182cf5a9 1611 }
1612 else
936dedf3 1613 /* ??? This isn't fully correct, we can't set the alignment from the
1614 type in all cases. */
1615 align = MAX (align, TYPE_ALIGN (type));
182cf5a9 1616 }
679e0056 1617
559c9389 1618 else if (TREE_CODE (t) == TARGET_MEM_REF)
1619 /* ??? This isn't fully correct, we can't set the alignment from the
1620 type in all cases. */
1621 align = MAX (align, TYPE_ALIGN (type));
1622
679e0056 1623 else if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1624 {
1625 if (integer_zerop (TREE_OPERAND (t, 1)))
1626 /* We don't know anything about the alignment. */
1627 align = BITS_PER_UNIT;
1628 else
1629 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1630 }
ecfe4ca9 1631
96216d37 1632 /* If the size is known, we can set that. */
1633 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
2a631e19 1634 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
96216d37 1635
579bccf9 1636 /* If T is not a type, we may be able to deduce some more information about
1637 the expression. */
1638 if (! TYPE_P (t))
2a631e19 1639 {
ae2dd339 1640 tree base;
698537d1 1641 bool align_computed = false;
b04fab2a 1642
2a631e19 1643 if (TREE_THIS_VOLATILE (t))
1644 MEM_VOLATILE_P (ref) = 1;
c6259b83 1645
3c00f11c 1646 /* Now remove any conversions: they don't change what the underlying
1647 object is. Likewise for SAVE_EXPR. */
72dd6141 1648 while (CONVERT_EXPR_P (t)
3c00f11c 1649 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1650 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1651 t = TREE_OPERAND (t, 0);
1652
ae2dd339 1653 /* We may look through structure-like accesses for the purposes of
1654 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1655 base = t;
1656 while (TREE_CODE (base) == COMPONENT_REF
1657 || TREE_CODE (base) == REALPART_EXPR
1658 || TREE_CODE (base) == IMAGPART_EXPR
1659 || TREE_CODE (base) == BIT_FIELD_REF)
1660 base = TREE_OPERAND (base, 0);
1661
182cf5a9 1662 if (TREE_CODE (base) == MEM_REF
1663 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1664 base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
ae2dd339 1665 if (DECL_P (base))
1666 {
1667 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1668 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1669 else
1670 MEM_NOTRAP_P (ref) = 1;
1671 }
1672 else
1673 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1674
1675 base = get_base_address (base);
1676 if (base && DECL_P (base)
1677 && TREE_READONLY (base)
1678 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
a62dc878 1679 MEM_READONLY_P (ref) = 1;
ae2dd339 1680
2b02580f 1681	      /* If this expression uses its parent's alias set, mark it such
1682 that we won't change it. */
1683 if (component_uses_parent_alias_set (t))
5cc193e7 1684 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1685
2a631e19 1686 /* If this is a decl, set the attributes of the MEM from it. */
1687 if (DECL_P (t))
1688 {
b10dbbca 1689 expr = t;
1690 offset = const0_rtx;
6f717f77 1691 apply_bitpos = bitpos;
2a631e19 1692 size = (DECL_SIZE_UNIT (t)
1693 && host_integerp (DECL_SIZE_UNIT (t), 1)
1694 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
d4c5e26d 1695 align = DECL_ALIGN (t);
698537d1 1696 align_computed = true;
2a631e19 1697 }
1698
ecfe4ca9 1699 /* If this is a constant, we know the alignment. */
ce45a448 1700 else if (CONSTANT_CLASS_P (t))
42f6f447 1701 {
1702 align = TYPE_ALIGN (type);
1703#ifdef CONSTANT_ALIGNMENT
1704 align = CONSTANT_ALIGNMENT (t, align);
1705#endif
698537d1 1706 align_computed = true;
42f6f447 1707 }
b10dbbca 1708
1709 /* If this is a field reference and not a bit-field, record it. */
f0b5f617 1710 /* ??? There is some information that can be gleaned from bit-fields,
b10dbbca 1711 such as the word offset in the structure that might be modified.
1712 But skip it for now. */
1713 else if (TREE_CODE (t) == COMPONENT_REF
1714 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1715 {
3a443843 1716 expr = t;
b10dbbca 1717 offset = const0_rtx;
6f717f77 1718 apply_bitpos = bitpos;
b10dbbca 1719	  /* ??? Any reason the field size would be different from
1720 the size we got from the type? */
1721 }
1722
1723 /* If this is an array reference, look for an outer field reference. */
1724 else if (TREE_CODE (t) == ARRAY_REF)
1725 {
1726 tree off_tree = size_zero_node;
6b039979 1727 /* We can't modify t, because we use it at the end of the
1728 function. */
1729 tree t2 = t;
b10dbbca 1730
1731 do
1732 {
6b039979 1733 tree index = TREE_OPERAND (t2, 1);
6374121b 1734 tree low_bound = array_ref_low_bound (t2);
1735 tree unit_size = array_ref_element_size (t2);
97f8ce30 1736
1737 /* We assume all arrays have sizes that are a multiple of a byte.
1738 First subtract the lower bound, if any, in the type of the
6374121b 1739 index, then convert to sizetype and multiply by the size of
1740 the array element. */
1741 if (! integer_zerop (low_bound))
faa43f85 1742 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1743 index, low_bound);
97f8ce30 1744
6374121b 1745 off_tree = size_binop (PLUS_EXPR,
535664e3 1746 size_binop (MULT_EXPR,
1747 fold_convert (sizetype,
1748 index),
6374121b 1749 unit_size),
1750 off_tree);
6b039979 1751 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1752 }
6b039979 1753 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1754
6b039979 1755 if (DECL_P (t2))
2d8fe5d0 1756 {
6b039979 1757 expr = t2;
0318dc09 1758 offset = NULL;
2d8fe5d0 1759 if (host_integerp (off_tree, 1))
0318dc09 1760 {
1761 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1762 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
6b039979 1763 align = DECL_ALIGN (t2);
3473aefe 1764 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
0318dc09 1765 align = aoff;
698537d1 1766 align_computed = true;
0318dc09 1767 offset = GEN_INT (ioff);
6f717f77 1768 apply_bitpos = bitpos;
0318dc09 1769 }
2d8fe5d0 1770 }
6b039979 1771 else if (TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1772 {
3a443843 1773 expr = t2;
1774 offset = NULL;
b10dbbca 1775 if (host_integerp (off_tree, 1))
6f717f77 1776 {
1777 offset = GEN_INT (tree_low_cst (off_tree, 1));
1778 apply_bitpos = bitpos;
1779 }
b10dbbca 1780	      /* ??? Any reason the field size would be different from
1781 the size we got from the type? */
1782 }
6d72287b 1783
6d72287b 1784 /* If this is an indirect reference, record it. */
182cf5a9 1785 else if (TREE_CODE (t) == MEM_REF
6d72287b 1786 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1787 {
1788 expr = t;
1789 offset = const0_rtx;
1790 apply_bitpos = bitpos;
1791 }
2d8fe5d0 1792 }
1793
6d72287b 1794 /* If this is an indirect reference, record it. */
182cf5a9 1795 else if (TREE_CODE (t) == MEM_REF
7e9c660e 1796 || TREE_CODE (t) == TARGET_MEM_REF
6d72287b 1797 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1798 {
1799 expr = t;
1800 offset = const0_rtx;
1801 apply_bitpos = bitpos;
1802 }
1803
698537d1 1804 if (!align_computed && !INDIRECT_REF_P (t))
1805 {
98ab9e8f 1806 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
698537d1 1807 align = MAX (align, obj_align);
1808 }
2a631e19 1809 }
1810
e2e205b3 1811 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1812 bit position offset. Similarly, increase the size of the accessed
1813 object to contain the negative offset. */
6f717f77 1814 if (apply_bitpos)
595f1461 1815 {
1816 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1817 if (size)
1818 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1819 }
6f717f77 1820
2a631e19 1821 /* Now set the attributes we computed above. */
5cc193e7 1822 MEM_ATTRS (ref)
bd1a81f7 1823 = get_mem_attrs (alias, expr, offset, size, align,
1824 TYPE_ADDR_SPACE (type), GET_MODE (ref));
2a631e19 1825
1826 /* If this is already known to be a scalar or aggregate, we are done. */
1827 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
96216d37 1828 return;
1829
2a631e19 1830 /* If it is a reference into an aggregate, this is part of an aggregate.
1831 Otherwise we don't know. */
c6259b83 1832 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1833 || TREE_CODE (t) == ARRAY_RANGE_REF
1834 || TREE_CODE (t) == BIT_FIELD_REF)
1835 MEM_IN_STRUCT_P (ref) = 1;
1836}
1837
6f717f77 1838void
35cb5232 1839set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1840{
1841 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1842}
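
/* Illustrative sketch, not part of the original file: a caller expanding a
   variable would build the MEM and attach attributes before publishing
   DECL_RTL, per the assertion in set_mem_attributes_minus_bitpos above:

       rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
       set_mem_attributes (mem, decl, 1);
       SET_DECL_RTL (decl, mem);

   Here DECL and ADDR stand for a hypothetical decl and its address rtx.  */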
1843
c6259b83 1844/* Set the alias set of MEM to SET. */
1845
1846void
32c2fdea 1847set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1848{
d4c5e26d 1849#ifdef ENABLE_CHECKING
c6259b83 1850 /* If the new and old alias sets don't conflict, something is wrong. */
611234b4 1851 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
c6259b83 1852#endif
1853
b10dbbca 1854 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
5cc193e7 1855 MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 1856 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1857}
1858
1859/* Set the address space of MEM to ADDRSPACE (target-defined). */
1860
1861void
1862set_mem_addr_space (rtx mem, addr_space_t addrspace)
1863{
1864 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1865 MEM_OFFSET (mem), MEM_SIZE (mem),
1866 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
c6259b83 1867}
96216d37 1868
1c4512da 1869/* Set the alignment of MEM to ALIGN bits. */
96216d37 1870
1871void
35cb5232 1872set_mem_align (rtx mem, unsigned int align)
96216d37 1873{
b10dbbca 1874 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
5cc193e7 1875 MEM_OFFSET (mem), MEM_SIZE (mem), align,
bd1a81f7 1876 MEM_ADDR_SPACE (mem), GET_MODE (mem));
96216d37 1877}
278fe152 1878
b10dbbca 1879/* Set the expr for MEM to EXPR. */
278fe152 1880
1881void
35cb5232 1882set_mem_expr (rtx mem, tree expr)
278fe152 1883{
1884 MEM_ATTRS (mem)
b10dbbca 1885 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
bd1a81f7 1886 MEM_SIZE (mem), MEM_ALIGN (mem),
1887 MEM_ADDR_SPACE (mem), GET_MODE (mem));
278fe152 1888}
b10dbbca 1889
1890/* Set the offset of MEM to OFFSET. */
1891
1892void
35cb5232 1893set_mem_offset (rtx mem, rtx offset)
b10dbbca 1894{
1895 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1896 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 1897 MEM_ADDR_SPACE (mem), GET_MODE (mem));
f0500469 1898}
1899
1900/* Set the size of MEM to SIZE. */
1901
1902void
35cb5232 1903set_mem_size (rtx mem, rtx size)
f0500469 1904{
1905 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1906 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
bd1a81f7 1907 MEM_ADDR_SPACE (mem), GET_MODE (mem));
b10dbbca 1908}
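
/* Illustrative sketch, not part of the original file: the setters above
   rebuild the MEM_ATTRS block rather than mutating it in place, so
   retargeting a MEM is just a sequence of calls such as:

       set_mem_align (mem, 32);
       set_mem_size (mem, GEN_INT (8));

   Each call re-enters get_mem_attrs, which shares identical attribute
   blocks through a hash table.  */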
c6259b83 1909\f
96216d37 1910/* Return a memory reference like MEMREF, but with its mode changed to MODE
1911 and its address changed to ADDR. (VOIDmode means don't change the mode.
1912 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1913 returned memory location is required to be valid. The memory
1914 attributes are not changed. */
15bbde2b 1915
96216d37 1916static rtx
35cb5232 1917change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 1918{
bd1a81f7 1919 addr_space_t as;
9ce37fa7 1920 rtx new_rtx;
15bbde2b 1921
611234b4 1922 gcc_assert (MEM_P (memref));
bd1a81f7 1923 as = MEM_ADDR_SPACE (memref);
15bbde2b 1924 if (mode == VOIDmode)
1925 mode = GET_MODE (memref);
1926 if (addr == 0)
1927 addr = XEXP (memref, 0);
3988ef8b 1928 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 1929 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 1930 return memref;
15bbde2b 1931
e4e86ec5 1932 if (validate)
15bbde2b 1933 {
e4e86ec5 1934 if (reload_in_progress || reload_completed)
bd1a81f7 1935 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 1936 else
bd1a81f7 1937 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 1938 }
d823ba47 1939
e8976cd7 1940 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1941 return memref;
1942
9ce37fa7 1943 new_rtx = gen_rtx_MEM (mode, addr);
1944 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1945 return new_rtx;
15bbde2b 1946}
537ffcfc 1947
96216d37 1948/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1949 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 1950
1951rtx
35cb5232 1952change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 1953{
9ce37fa7 1954 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1955 enum machine_mode mmode = GET_MODE (new_rtx);
0ab96142 1956 unsigned int align;
1957
1958 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1959 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
6cc60c4d 1960
d28edf0d 1961 /* If there are no changes, just return the original memory reference. */
9ce37fa7 1962 if (new_rtx == memref)
0ab96142 1963 {
1964 if (MEM_ATTRS (memref) == 0
1965 || (MEM_EXPR (memref) == NULL
1966 && MEM_OFFSET (memref) == NULL
1967 && MEM_SIZE (memref) == size
1968 && MEM_ALIGN (memref) == align))
9ce37fa7 1969 return new_rtx;
0ab96142 1970
9ce37fa7 1971 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1972 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 1973 }
d28edf0d 1974
9ce37fa7 1975 MEM_ATTRS (new_rtx)
bd1a81f7 1976 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1977 MEM_ADDR_SPACE (memref), mmode);
fb257ae6 1978
9ce37fa7 1979 return new_rtx;
e513d163 1980}
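
/* Illustrative sketch, not part of the original file: change_address fits
   when both the mode and the address change and the old expr/offset/size
   no longer apply, e.g. reading a BLKmode buffer one word at a time:

       rtx word = change_address (blk_mem, SImode, word_addr);

   Only the alias set and address space survive; size and alignment are
   recomputed from SImode as seen above.  BLK_MEM and WORD_ADDR are
   hypothetical.  */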
537ffcfc 1981
96216d37 1982/* Return a memory reference like MEMREF, but with its mode changed
1983 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 1984 nonzero, the memory address is forced to be valid.
1985 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
 1986	   and the caller is responsible for adjusting the MEMREF base register. */
e4e86ec5 1987
1988rtx
35cb5232 1989adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1990 int validate, int adjust)
e4e86ec5 1991{
fb257ae6 1992 rtx addr = XEXP (memref, 0);
9ce37fa7 1993 rtx new_rtx;
96216d37 1994 rtx memoffset = MEM_OFFSET (memref);
5cc193e7 1995 rtx size = 0;
96216d37 1996 unsigned int memalign = MEM_ALIGN (memref);
bd1a81f7 1997 addr_space_t as = MEM_ADDR_SPACE (memref);
98155838 1998 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
cfb75cdf 1999 int pbits;
fb257ae6 2000
d28edf0d 2001 /* If there are no changes, just return the original memory reference. */
2002 if (mode == GET_MODE (memref) && !offset
bd1a81f7 2003 && (!validate || memory_address_addr_space_p (mode, addr, as)))
d28edf0d 2004 return memref;
2005
e36c3d58 2006 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2007 This may happen even if offset is nonzero -- consider
e36c3d58 2008 (plus (plus reg reg) const_int) -- so do this always. */
2009 addr = copy_rtx (addr);
2010
cfb75cdf 2011 /* Convert a possibly large offset to a signed value within the
2012 range of the target address space. */
98155838 2013 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2014 if (HOST_BITS_PER_WIDE_INT > pbits)
2015 {
2016 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2017 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2018 >> shift);
2019 }
2020
cd358719 2021 if (adjust)
2022 {
2023 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2024 object, we can merge it into the LO_SUM. */
2025 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2026 && offset >= 0
2027 && (unsigned HOST_WIDE_INT) offset
2028 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2029 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
cd358719 2030 plus_constant (XEXP (addr, 1), offset));
2031 else
2032 addr = plus_constant (addr, offset);
2033 }
fb257ae6 2034
9ce37fa7 2035 new_rtx = change_address_1 (memref, mode, addr, validate);
96216d37 2036
e077413c 2037 /* If the address is a REG, change_address_1 rightfully returns memref,
2038 but this would destroy memref's MEM_ATTRS. */
2039 if (new_rtx == memref && offset != 0)
2040 new_rtx = copy_rtx (new_rtx);
2041
96216d37 2042 /* Compute the new values of the memory attributes due to this adjustment.
2043 We add the offsets and update the alignment. */
2044 if (memoffset)
2045 memoffset = GEN_INT (offset + INTVAL (memoffset));
2046
b8098e5b 2047 /* Compute the new alignment by taking the MIN of the alignment and the
2048 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2049	   is zero. */
2050 if (offset != 0)
f4aee538 2051 memalign
2052 = MIN (memalign,
2053 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
96216d37 2054
5cc193e7 2055 /* We can compute the size in a number of ways. */
9ce37fa7 2056 if (GET_MODE (new_rtx) != BLKmode)
2057 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
5cc193e7 2058 else if (MEM_SIZE (memref))
2059 size = plus_constant (MEM_SIZE (memref), -offset);
2060
9ce37fa7 2061 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
bd1a81f7 2062 memoffset, size, memalign, as,
2063 GET_MODE (new_rtx));
96216d37 2064
2065 /* At some point, we should validate that this offset is within the object,
2066 if all the appropriate values are known. */
9ce37fa7 2067 return new_rtx;
e4e86ec5 2068}
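
/* Illustrative sketch, not part of the original file: callers normally go
   through the adjust_address/adjust_address_nv macros, which wrap
   adjust_address_1, e.g. splitting a DImode MEM into words on a 32-bit
   big-endian target:

       rtx hi = adjust_address (di_mem, SImode, 0);
       rtx lo = adjust_address (di_mem, SImode, 4);

   MEM_OFFSET, MEM_SIZE and MEM_ALIGN then track the adjustment
   automatically.  DI_MEM is hypothetical.  */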
2069
bf42c62d 2070/* Return a memory reference like MEMREF, but with its mode changed
2071 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2072 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2073 nonzero, the memory address is forced to be valid. */
2074
2075rtx
35cb5232 2076adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2077 HOST_WIDE_INT offset, int validate)
bf42c62d 2078{
2079 memref = change_address_1 (memref, VOIDmode, addr, validate);
2080 return adjust_address_1 (memref, mode, offset, validate, 0);
2081}
2082
2a631e19 2083/* Return a memory reference like MEMREF, but whose address is changed by
2084 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2085 known to be in OFFSET (possibly 1). */
fcdc122e 2086
2087rtx
35cb5232 2088offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2089{
9ce37fa7 2090 rtx new_rtx, addr = XEXP (memref, 0);
bd1a81f7 2091 addr_space_t as = MEM_ADDR_SPACE (memref);
98155838 2092 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
fac6aae6 2093
98155838 2094 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2095
d4c5e26d 2096 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2097 could have secondary memory references, multiplies or anything.
fac6aae6 2098
2099 However, if we did go and rearrange things, we can wind up not
2100 being able to recognize the magic around pic_offset_table_rtx.
2101 This stuff is fragile, and is yet another example of why it is
2102 bad to expose PIC machinery too early. */
bd1a81f7 2103 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
fac6aae6 2104 && GET_CODE (addr) == PLUS
2105 && XEXP (addr, 0) == pic_offset_table_rtx)
2106 {
2107 addr = force_reg (GET_MODE (addr), addr);
98155838 2108 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2109 }
2110
9ce37fa7 2111 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2112 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
fcdc122e 2113
d28edf0d 2114 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2115 if (new_rtx == memref)
2116 return new_rtx;
d28edf0d 2117
fcdc122e 2118 /* Update the alignment to reflect the offset. Reset the offset, which
2119 we don't know. */
9ce37fa7 2120 MEM_ATTRS (new_rtx)
80fabb90 2121 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
84130727 2122 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
bd1a81f7 2123 as, GET_MODE (new_rtx));
9ce37fa7 2124 return new_rtx;
fcdc122e 2125}
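
/* Illustrative sketch, not part of the original file: offset_address is
   for variable offsets where only a power-of-two factor is known, e.g.
   indexing an array MEM by a register holding a byte offset that is a
   multiple of 4:

       rtx elt = offset_address (array_mem, idx_reg, 4);

   The result keeps at most 32-bit alignment, per the MIN above.
   ARRAY_MEM and IDX_REG are hypothetical.  */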
d4c5e26d 2126
537ffcfc 2127/* Return a memory reference like MEMREF, but with its address changed to
2128 ADDR. The caller is asserting that the actual piece of memory pointed
2129 to is the same, just the form of the address is being changed, such as
2130 by putting something into a register. */
2131
2132rtx
35cb5232 2133replace_equiv_address (rtx memref, rtx addr)
537ffcfc 2134{
96216d37 2135 /* change_address_1 copies the memory attribute structure without change
2136 and that's exactly what we want here. */
ecfe4ca9 2137 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2138 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2139}
96216d37 2140
e4e86ec5 2141/* Likewise, but the reference is not required to be valid. */
2142
2143rtx
35cb5232 2144replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2145{
e4e86ec5 2146 return change_address_1 (memref, VOIDmode, addr, 0);
2147}
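
/* Illustrative sketch, not part of the original file: the classic use of
   replace_equiv_address is legitimizing an address without losing the
   memory attributes:

       rtx reg = force_reg (Pmode, XEXP (mem, 0));
       mem = replace_equiv_address (mem, reg);

   The caller asserts the same object is referenced, so the attribute
   block is copied unchanged.  */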
8259ab07 2148
2149/* Return a memory reference like MEMREF, but with its mode widened to
2150 MODE and offset by OFFSET. This would be used by targets that e.g.
2151 cannot issue QImode memory operations and have to use SImode memory
2152 operations plus masking logic. */
2153
2154rtx
35cb5232 2155widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2156{
9ce37fa7 2157 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2158 tree expr = MEM_EXPR (new_rtx);
2159 rtx memoffset = MEM_OFFSET (new_rtx);
8259ab07 2160 unsigned int size = GET_MODE_SIZE (mode);
2161
d28edf0d 2162 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2163 if (new_rtx == memref)
2164 return new_rtx;
d28edf0d 2165
8259ab07 2166 /* If we don't know what offset we were at within the expression, then
2167 we can't know if we've overstepped the bounds. */
22ee087b 2168 if (! memoffset)
8259ab07 2169 expr = NULL_TREE;
2170
2171 while (expr)
2172 {
2173 if (TREE_CODE (expr) == COMPONENT_REF)
2174 {
2175 tree field = TREE_OPERAND (expr, 1);
6374121b 2176 tree offset = component_ref_field_offset (expr);
8259ab07 2177
2178 if (! DECL_SIZE_UNIT (field))
2179 {
2180 expr = NULL_TREE;
2181 break;
2182 }
2183
 2184	      /* Is the field at least as large as the access?  If so, we're done;
2185 otherwise strip back to the containing structure. */
8359cfb4 2186 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2187 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
8259ab07 2188 && INTVAL (memoffset) >= 0)
2189 break;
2190
6374121b 2191 if (! host_integerp (offset, 1))
8259ab07 2192 {
2193 expr = NULL_TREE;
2194 break;
2195 }
2196
2197 expr = TREE_OPERAND (expr, 0);
6374121b 2198 memoffset
2199 = (GEN_INT (INTVAL (memoffset)
2200 + tree_low_cst (offset, 1)
2201 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2202 / BITS_PER_UNIT)));
8259ab07 2203 }
2204 /* Similarly for the decl. */
2205 else if (DECL_P (expr)
2206 && DECL_SIZE_UNIT (expr)
40c4e66e 2207 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
8259ab07 2208 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2209 && (! memoffset || INTVAL (memoffset) >= 0))
2210 break;
2211 else
2212 {
2213 /* The widened memory access overflows the expression, which means
2214 that it could alias another expression. Zap it. */
2215 expr = NULL_TREE;
2216 break;
2217 }
2218 }
2219
2220 if (! expr)
2221 memoffset = NULL_RTX;
2222
2223 /* The widened memory may alias other stuff, so zap the alias set. */
2224 /* ??? Maybe use get_alias_set on any remaining expression. */
2225
9ce37fa7 2226 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
bd1a81f7 2227 MEM_ALIGN (new_rtx),
2228 MEM_ADDR_SPACE (new_rtx), mode);
8259ab07 2229
9ce37fa7 2230 return new_rtx;
8259ab07 2231}
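
/* Illustrative sketch, not part of the original file: a target without
   byte loads might widen a QImode access and mask, roughly:

       rtx wide = widen_memory_access (byte_mem, SImode, 0);
       rtx val = expand_binop (SImode, and_optab, copy_to_reg (wide),
                               GEN_INT (0xff), NULL_RTX, 1, OPTAB_LIB_WIDEN);

   This assumes the byte lands in the low bits; BYTE_MEM is hypothetical,
   and expand_binop/and_optab come from optabs.c.  */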
15bbde2b 2232\f
ac681e84 2233/* A fake decl that is used as the MEM_EXPR of spill slots. */
2234static GTY(()) tree spill_slot_decl;
2235
58029e61 2236tree
2237get_spill_slot_decl (bool force_build_p)
ac681e84 2238{
2239 tree d = spill_slot_decl;
2240 rtx rd;
2241
58029e61 2242 if (d || !force_build_p)
ac681e84 2243 return d;
2244
e60a6f7b 2245 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2246 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2247 DECL_ARTIFICIAL (d) = 1;
2248 DECL_IGNORED_P (d) = 1;
2249 TREE_USED (d) = 1;
2250 TREE_THIS_NOTRAP (d) = 1;
2251 spill_slot_decl = d;
2252
2253 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2254 MEM_NOTRAP_P (rd) = 1;
2255 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
bd1a81f7 2256 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
ac681e84 2257 SET_DECL_RTL (d, rd);
2258
2259 return d;
2260}
2261
2262/* Given MEM, a result from assign_stack_local, fill in the memory
2263 attributes as appropriate for a register allocator spill slot.
2264 These slots are not aliasable by other memory. We arrange for
2265 them all to use a single MEM_EXPR, so that the aliasing code can
2266 work properly in the case of shared spill slots. */
2267
2268void
2269set_mem_attrs_for_spill (rtx mem)
2270{
2271 alias_set_type alias;
2272 rtx addr, offset;
2273 tree expr;
2274
58029e61 2275 expr = get_spill_slot_decl (true);
ac681e84 2276 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2277
2278 /* We expect the incoming memory to be of the form:
2279 (mem:MODE (plus (reg sfp) (const_int offset)))
2280 with perhaps the plus missing for offset = 0. */
2281 addr = XEXP (mem, 0);
2282 offset = const0_rtx;
2283 if (GET_CODE (addr) == PLUS
971ba038 2284 && CONST_INT_P (XEXP (addr, 1)))
ac681e84 2285 offset = XEXP (addr, 1);
2286
2287 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2288 MEM_SIZE (mem), MEM_ALIGN (mem),
bd1a81f7 2289 ADDR_SPACE_GENERIC, GET_MODE (mem));
ac681e84 2290 MEM_NOTRAP_P (mem) = 1;
2291}
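
/* Illustrative sketch, not part of the original file: a register
   allocator would apply this right after carving out a stack slot:

       rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
       set_mem_attrs_for_spill (slot);

   after which all spill slots share the single %sfp MEM_EXPR built by
   get_spill_slot_decl above.  */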
2292\f
15bbde2b 2293/* Return a newly created CODE_LABEL rtx with a unique label number. */
2294
2295rtx
35cb5232 2296gen_label_rtx (void)
15bbde2b 2297{
a7ae1e59 2298 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2299 NULL, label_num++, NULL);
15bbde2b 2300}
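
/* Illustrative sketch, not part of the original file: a fresh label has
   no position until emitted, so the usual pattern is a forward branch
   followed later by emit_label:

       rtx label = gen_label_rtx ();
       emit_jump_insn (gen_jump (label));
       ...
       emit_label (label);

   gen_jump comes from the target's generated insn-emit.c.  */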
2301\f
2302/* For procedure integration. */
2303
15bbde2b 2304/* Install new pointers to the first and last insns in the chain.
d4c332ff 2305 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2306 Used for an inline-procedure after copying the insn chain. */
2307
2308void
35cb5232 2309set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2310{
d4c332ff 2311 rtx insn;
2312
06f9d6ef 2313 set_first_insn (first);
2314 set_last_insn (last);
d4c332ff 2315 cur_insn_uid = 0;
2316
9845d120 2317 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2318 {
2319 int debug_count = 0;
2320
2321 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2322 cur_debug_insn_uid = 0;
2323
2324 for (insn = first; insn; insn = NEXT_INSN (insn))
2325 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2326 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2327 else
2328 {
2329 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2330 if (DEBUG_INSN_P (insn))
2331 debug_count++;
2332 }
2333
2334 if (debug_count)
2335 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2336 else
2337 cur_debug_insn_uid++;
2338 }
2339 else
2340 for (insn = first; insn; insn = NEXT_INSN (insn))
2341 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2342
2343 cur_insn_uid++;
15bbde2b 2344}
15bbde2b 2345\f
d823ba47 2346/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2347 structure. This routine should only be called once. */
15bbde2b 2348
a40c0eeb 2349static void
df329266 2350unshare_all_rtl_1 (rtx insn)
15bbde2b 2351{
2d96a59a 2352 /* Unshare just about everything else. */
1cd4cfea 2353 unshare_all_rtl_in_chain (insn);
d823ba47 2354
15bbde2b 2355 /* Make sure the addresses of stack slots found outside the insn chain
2356 (such as, in DECL_RTL of a variable) are not shared
2357 with the insn chain.
2358
2359 This special care is necessary when the stack slot MEM does not
2360 actually appear in the insn chain. If it does appear, its address
2361 is unshared from all else at that point. */
45733446 2362 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2363}
2364
d823ba47 2365/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2366 structure, again. This is a fairly expensive thing to do so it
2367 should be done sparingly. */
2368
2369void
35cb5232 2370unshare_all_rtl_again (rtx insn)
2d96a59a 2371{
2372 rtx p;
5244079b 2373 tree decl;
2374
2d96a59a 2375 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2376 if (INSN_P (p))
2d96a59a 2377 {
2378 reset_used_flags (PATTERN (p));
2379 reset_used_flags (REG_NOTES (p));
2d96a59a 2380 }
5244079b 2381
01dc9f0c 2382 /* Make sure that virtual stack slots are not shared. */
265be050 2383 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2384
5244079b 2385 /* Make sure that virtual parameters are not shared. */
1767a056 2386 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2387 set_used_flags (DECL_RTL (decl));
5244079b 2388
2389 reset_used_flags (stack_slot_list);
2390
df329266 2391 unshare_all_rtl_1 (insn);
a40c0eeb 2392}
2393
2a1990e9 2394unsigned int
a40c0eeb 2395unshare_all_rtl (void)
2396{
df329266 2397 unshare_all_rtl_1 (get_insns ());
2a1990e9 2398 return 0;
2d96a59a 2399}
2400
20099e35 2401struct rtl_opt_pass pass_unshare_all_rtl =
77fce4cd 2402{
20099e35 2403 {
2404 RTL_PASS,
228967a9 2405 "unshare", /* name */
77fce4cd 2406 NULL, /* gate */
2407 unshare_all_rtl, /* execute */
2408 NULL, /* sub */
2409 NULL, /* next */
2410 0, /* static_pass_number */
0b1615c1 2411 TV_NONE, /* tv_id */
77fce4cd 2412 0, /* properties_required */
2413 0, /* properties_provided */
2414 0, /* properties_destroyed */
2415 0, /* todo_flags_start */
20099e35 2416 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2417 }
77fce4cd 2418};
2419
2420
1cd4cfea 2421/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2422 Recursively does the same for subexpressions. */
2423
2424static void
2425verify_rtx_sharing (rtx orig, rtx insn)
2426{
2427 rtx x = orig;
2428 int i;
2429 enum rtx_code code;
2430 const char *format_ptr;
2431
2432 if (x == 0)
2433 return;
2434
2435 code = GET_CODE (x);
2436
2437 /* These types may be freely shared. */
2438
2439 switch (code)
2440 {
2441 case REG:
688ff29b 2442 case DEBUG_EXPR:
2443 case VALUE:
1cd4cfea 2444 case CONST_INT:
2445 case CONST_DOUBLE:
e397ad8e 2446 case CONST_FIXED:
1cd4cfea 2447 case CONST_VECTOR:
2448 case SYMBOL_REF:
2449 case LABEL_REF:
2450 case CODE_LABEL:
2451 case PC:
2452 case CC0:
2453 case SCRATCH:
1cd4cfea 2454 return;
c09425a0 2455       /* SCRATCH must be shared because each one represents a distinct value. */
2456 case CLOBBER:
2457 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2458 return;
2459 break;
1cd4cfea 2460
2461 case CONST:
3072d30e 2462 if (shared_const_p (orig))
1cd4cfea 2463 return;
2464 break;
2465
2466 case MEM:
2467 /* A MEM is allowed to be shared if its address is constant. */
2468 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2469 || reload_completed || reload_in_progress)
2470 return;
2471
2472 break;
2473
2474 default:
2475 break;
2476 }
2477
 2478	  /* This rtx may not be shared.  If it has already been seen,
 2479	     report the invalid sharing. */
9cee7c3f 2480#ifdef ENABLE_CHECKING
1cd4cfea 2481 if (RTX_FLAG (x, used))
2482 {
0a81f5a0 2483 error ("invalid rtl sharing found in the insn");
1cd4cfea 2484 debug_rtx (insn);
0a81f5a0 2485 error ("shared rtx");
1cd4cfea 2486 debug_rtx (x);
0a81f5a0 2487 internal_error ("internal consistency failure");
1cd4cfea 2488 }
9cee7c3f 2489#endif
2490 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2491
1cd4cfea 2492 RTX_FLAG (x, used) = 1;
2493
8b332087 2494 /* Now scan the subexpressions recursively. */
1cd4cfea 2495
2496 format_ptr = GET_RTX_FORMAT (code);
2497
2498 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2499 {
2500 switch (*format_ptr++)
2501 {
2502 case 'e':
2503 verify_rtx_sharing (XEXP (x, i), insn);
2504 break;
2505
2506 case 'E':
2507 if (XVEC (x, i) != NULL)
2508 {
2509 int j;
2510 int len = XVECLEN (x, i);
2511
2512 for (j = 0; j < len; j++)
2513 {
9cee7c3f 2514	      /* We allow sharing of ASM_OPERANDS inside a single
2515 instruction. */
1cd4cfea 2516 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2517 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2518 == ASM_OPERANDS))
1cd4cfea 2519 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2520 else
2521 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2522 }
2523 }
2524 break;
2525 }
2526 }
2527 return;
2528}
2529
c7bf1374 2530/* Go through all the RTL insn bodies and check that there is no unexpected
1cd4cfea 2531    sharing between the subexpressions. */
2532
4b987fac 2533DEBUG_FUNCTION void
1cd4cfea 2534verify_rtl_sharing (void)
2535{
2536 rtx p;
2537
2538 for (p = get_insns (); p; p = NEXT_INSN (p))
2539 if (INSN_P (p))
2540 {
2541 reset_used_flags (PATTERN (p));
2542 reset_used_flags (REG_NOTES (p));
764f640f 2543 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2544 {
2545 int i;
2546 rtx q, sequence = PATTERN (p);
2547
2548 for (i = 0; i < XVECLEN (sequence, 0); i++)
2549 {
2550 q = XVECEXP (sequence, 0, i);
2551 gcc_assert (INSN_P (q));
2552 reset_used_flags (PATTERN (q));
2553 reset_used_flags (REG_NOTES (q));
764f640f 2554 }
2555 }
1cd4cfea 2556 }
2557
2558 for (p = get_insns (); p; p = NEXT_INSN (p))
2559 if (INSN_P (p))
2560 {
2561 verify_rtx_sharing (PATTERN (p), p);
2562 verify_rtx_sharing (REG_NOTES (p), p);
1cd4cfea 2563 }
2564}
2565
2d96a59a 2566/* Go through all the RTL insn bodies and copy any invalid shared structure.
2567 Assumes the mark bits are cleared at entry. */
2568
1cd4cfea 2569void
2570unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2571{
2572 for (; insn; insn = NEXT_INSN (insn))
9204e736 2573 if (INSN_P (insn))
2d96a59a 2574 {
2575 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2576 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2d96a59a 2577 }
2578}
2579
01dc9f0c 2580/* Go through all virtual stack slots of a function and mark them as
265be050 2581 shared. We never replace the DECL_RTLs themselves with a copy,
2582 but expressions mentioned into a DECL_RTL cannot be shared with
2583 expressions in the instruction stream.
2584
2585 Note that reload may convert pseudo registers into memories in-place.
2586 Pseudo registers are always shared, but MEMs never are. Thus if we
2587 reset the used flags on MEMs in the instruction stream, we must set
2588 them again on MEMs that appear in DECL_RTLs. */
2589
01dc9f0c 2590static void
265be050 2591set_used_decls (tree blk)
01dc9f0c 2592{
2593 tree t;
2594
2595 /* Mark decls. */
1767a056 2596 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2597 if (DECL_RTL_SET_P (t))
265be050 2598 set_used_flags (DECL_RTL (t));
01dc9f0c 2599
2600 /* Now process sub-blocks. */
93110716 2601 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2602 set_used_decls (t);
01dc9f0c 2603}
2604
15bbde2b 2605/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2606 Recursively does the same for subexpressions. Uses
2607 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2608
2609rtx
35cb5232 2610copy_rtx_if_shared (rtx orig)
15bbde2b 2611{
0e0727c4 2612 copy_rtx_if_shared_1 (&orig);
2613 return orig;
2614}
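
/* Illustrative sketch, not part of the original file: the used-bit
   protocol requires clearing the marks before copying, as
   unshare_all_rtl_again does above:

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   Calling copy_rtx_if_shared again without the reset would copy
   everything, since the first pass leaves all used bits set.  */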
2615
7ba6ce7a 2616/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2617 use. Recursively does the same for subexpressions. */
2618
0e0727c4 2619static void
2620copy_rtx_if_shared_1 (rtx *orig1)
2621{
2622 rtx x;
19cb6b50 2623 int i;
2624 enum rtx_code code;
0e0727c4 2625 rtx *last_ptr;
19cb6b50 2626 const char *format_ptr;
15bbde2b 2627 int copied = 0;
0e0727c4 2628 int length;
2629
2630 /* Repeat is used to turn tail-recursion into iteration. */
2631repeat:
2632 x = *orig1;
15bbde2b 2633
2634 if (x == 0)
0e0727c4 2635 return;
15bbde2b 2636
2637 code = GET_CODE (x);
2638
2639 /* These types may be freely shared. */
2640
2641 switch (code)
2642 {
2643 case REG:
688ff29b 2644 case DEBUG_EXPR:
2645 case VALUE:
15bbde2b 2646 case CONST_INT:
2647 case CONST_DOUBLE:
e397ad8e 2648 case CONST_FIXED:
886cfd4f 2649 case CONST_VECTOR:
15bbde2b 2650 case SYMBOL_REF:
1cd4cfea 2651 case LABEL_REF:
15bbde2b 2652 case CODE_LABEL:
2653 case PC:
2654 case CC0:
2655 case SCRATCH:
a92771b8 2656      /* SCRATCH must be shared because each one represents a distinct value. */
0e0727c4 2657 return;
c09425a0 2658 case CLOBBER:
2659 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2660 return;
2661 break;
15bbde2b 2662
f63d12e3 2663 case CONST:
3072d30e 2664 if (shared_const_p (x))
0e0727c4 2665 return;
f63d12e3 2666 break;
2667
9845d120 2668 case DEBUG_INSN:
15bbde2b 2669 case INSN:
2670 case JUMP_INSN:
2671 case CALL_INSN:
2672 case NOTE:
15bbde2b 2673 case BARRIER:
2674 /* The chain of insns is not being copied. */
0e0727c4 2675 return;
15bbde2b 2676
0dbd1c74 2677 default:
2678 break;
15bbde2b 2679 }
2680
2681 /* This rtx may not be shared. If it has already been seen,
2682 replace it with a copy of itself. */
2683
7c25cb91 2684 if (RTX_FLAG (x, used))
15bbde2b 2685 {
f2d0e9f1 2686 x = shallow_copy_rtx (x);
15bbde2b 2687 copied = 1;
2688 }
7c25cb91 2689 RTX_FLAG (x, used) = 1;
15bbde2b 2690
2691 /* Now scan the subexpressions recursively.
2692 We can store any replaced subexpressions directly into X
2693 since we know X is not shared! Any vectors in X
2694 must be copied if X was copied. */
2695
2696 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2697 length = GET_RTX_LENGTH (code);
2698 last_ptr = NULL;
48e1416a 2699
0e0727c4 2700 for (i = 0; i < length; i++)
15bbde2b 2701 {
2702 switch (*format_ptr++)
2703 {
2704 case 'e':
0e0727c4 2705 if (last_ptr)
2706 copy_rtx_if_shared_1 (last_ptr);
2707 last_ptr = &XEXP (x, i);
15bbde2b 2708 break;
2709
2710 case 'E':
2711 if (XVEC (x, i) != NULL)
2712 {
19cb6b50 2713 int j;
ffe0869b 2714 int len = XVECLEN (x, i);
48e1416a 2715
8b332087 2716	    /* Copy the vector iff we copied the rtx and the length
2717 is nonzero. */
ffe0869b 2718 if (copied && len > 0)
a4070a91 2719 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 2720
d632b59a 2721	    /* Call recursively on everything inside the vector. */
ffe0869b 2722 for (j = 0; j < len; j++)
0e0727c4 2723 {
2724 if (last_ptr)
2725 copy_rtx_if_shared_1 (last_ptr);
2726 last_ptr = &XVECEXP (x, i, j);
2727 }
15bbde2b 2728 }
2729 break;
2730 }
2731 }
0e0727c4 2732 *orig1 = x;
2733 if (last_ptr)
2734 {
2735 orig1 = last_ptr;
2736 goto repeat;
2737 }
2738 return;
15bbde2b 2739}
2740
2741/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2742 to look for shared sub-parts. */
2743
2744void
35cb5232 2745reset_used_flags (rtx x)
15bbde2b 2746{
19cb6b50 2747 int i, j;
2748 enum rtx_code code;
2749 const char *format_ptr;
0e0727c4 2750 int length;
15bbde2b 2751
0e0727c4 2752 /* Repeat is used to turn tail-recursion into iteration. */
2753repeat:
15bbde2b 2754 if (x == 0)
2755 return;
2756
2757 code = GET_CODE (x);
2758
c3418f42 2759 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2760 for them. */
2761
2762 switch (code)
2763 {
2764 case REG:
688ff29b 2765 case DEBUG_EXPR:
2766 case VALUE:
15bbde2b 2767 case CONST_INT:
2768 case CONST_DOUBLE:
e397ad8e 2769 case CONST_FIXED:
886cfd4f 2770 case CONST_VECTOR:
15bbde2b 2771 case SYMBOL_REF:
2772 case CODE_LABEL:
2773 case PC:
2774 case CC0:
2775 return;
2776
9845d120 2777 case DEBUG_INSN:
15bbde2b 2778 case INSN:
2779 case JUMP_INSN:
2780 case CALL_INSN:
2781 case NOTE:
2782 case LABEL_REF:
2783 case BARRIER:
2784 /* The chain of insns is not being copied. */
2785 return;
d823ba47 2786
0dbd1c74 2787 default:
2788 break;
15bbde2b 2789 }
2790
7c25cb91 2791 RTX_FLAG (x, used) = 0;
15bbde2b 2792
2793 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2794 length = GET_RTX_LENGTH (code);
48e1416a 2795
0e0727c4 2796 for (i = 0; i < length; i++)
15bbde2b 2797 {
2798 switch (*format_ptr++)
2799 {
2800 case 'e':
0e0727c4 2801 if (i == length-1)
2802 {
2803 x = XEXP (x, i);
2804 goto repeat;
2805 }
15bbde2b 2806 reset_used_flags (XEXP (x, i));
2807 break;
2808
2809 case 'E':
2810 for (j = 0; j < XVECLEN (x, i); j++)
2811 reset_used_flags (XVECEXP (x, i, j));
2812 break;
2813 }
2814 }
2815}
1cd4cfea 2816
2817/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2818 to look for shared sub-parts. */
2819
2820void
2821set_used_flags (rtx x)
2822{
2823 int i, j;
2824 enum rtx_code code;
2825 const char *format_ptr;
2826
2827 if (x == 0)
2828 return;
2829
2830 code = GET_CODE (x);
2831
2832 /* These types may be freely shared so we needn't do any resetting
2833 for them. */
2834
2835 switch (code)
2836 {
2837 case REG:
688ff29b 2838 case DEBUG_EXPR:
2839 case VALUE:
1cd4cfea 2840 case CONST_INT:
2841 case CONST_DOUBLE:
e397ad8e 2842 case CONST_FIXED:
1cd4cfea 2843 case CONST_VECTOR:
2844 case SYMBOL_REF:
2845 case CODE_LABEL:
2846 case PC:
2847 case CC0:
2848 return;
2849
9845d120 2850 case DEBUG_INSN:
1cd4cfea 2851 case INSN:
2852 case JUMP_INSN:
2853 case CALL_INSN:
2854 case NOTE:
2855 case LABEL_REF:
2856 case BARRIER:
2857 /* The chain of insns is not being copied. */
2858 return;
2859
2860 default:
2861 break;
2862 }
2863
2864 RTX_FLAG (x, used) = 1;
2865
2866 format_ptr = GET_RTX_FORMAT (code);
2867 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2868 {
2869 switch (*format_ptr++)
2870 {
2871 case 'e':
2872 set_used_flags (XEXP (x, i));
2873 break;
2874
2875 case 'E':
2876 for (j = 0; j < XVECLEN (x, i); j++)
2877 set_used_flags (XVECEXP (x, i, j));
2878 break;
2879 }
2880 }
2881}
15bbde2b 2882\f
2883/* Copy X if necessary so that it won't be altered by changes in OTHER.
2884 Return X or the rtx for the pseudo reg the value of X was copied into.
2885 OTHER must be valid as a SET_DEST. */
2886
2887rtx
35cb5232 2888make_safe_from (rtx x, rtx other)
15bbde2b 2889{
2890 while (1)
2891 switch (GET_CODE (other))
2892 {
2893 case SUBREG:
2894 other = SUBREG_REG (other);
2895 break;
2896 case STRICT_LOW_PART:
2897 case SIGN_EXTEND:
2898 case ZERO_EXTEND:
2899 other = XEXP (other, 0);
2900 break;
2901 default:
2902 goto done;
2903 }
2904 done:
e16ceb8e 2905 if ((MEM_P (other)
15bbde2b 2906 && ! CONSTANT_P (x)
8ad4c111 2907 && !REG_P (x)
15bbde2b 2908 && GET_CODE (x) != SUBREG)
8ad4c111 2909 || (REG_P (other)
15bbde2b 2910 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2911 || reg_mentioned_p (other, x))))
2912 {
2913 rtx temp = gen_reg_rtx (GET_MODE (x));
2914 emit_move_insn (temp, x);
2915 return temp;
2916 }
2917 return x;
2918}
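
/* Illustrative sketch, not part of the original file: expanders call this
   before emitting code that writes OTHER while X is still live, e.g.:

       op1 = make_safe_from (op1, target);
       emit_move_insn (target, op0);

   If OP1 might overlap TARGET, make_safe_from has already copied it into
   a fresh pseudo, as seen above.  OP0, OP1 and TARGET are hypothetical.  */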
2919\f
2920/* Emission of insns (adding them to the doubly-linked list). */
2921
15bbde2b 2922/* Return the last insn emitted, even if it is in a sequence now pushed. */
2923
2924rtx
35cb5232 2925get_last_insn_anywhere (void)
15bbde2b 2926{
2927 struct sequence_stack *stack;
06f9d6ef 2928 if (get_last_insn ())
2929 return get_last_insn ();
0a893c29 2930 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2931 if (stack->last != 0)
2932 return stack->last;
2933 return 0;
2934}
2935
70545de4 2936/* Return the first nonnote insn emitted in the current sequence or current
2937 function. This routine looks inside SEQUENCEs. */
2938
2939rtx
35cb5232 2940get_first_nonnote_insn (void)
70545de4 2941{
06f9d6ef 2942 rtx insn = get_insns ();
f86e856e 2943
2944 if (insn)
2945 {
2946 if (NOTE_P (insn))
2947 for (insn = next_insn (insn);
2948 insn && NOTE_P (insn);
2949 insn = next_insn (insn))
2950 continue;
2951 else
2952 {
1c14a50e 2953 if (NONJUMP_INSN_P (insn)
f86e856e 2954 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2955 insn = XVECEXP (PATTERN (insn), 0, 0);
2956 }
2957 }
70545de4 2958
2959 return insn;
2960}
2961
 2962/* Return the last nonnote insn emitted in the current sequence or current
2963 function. This routine looks inside SEQUENCEs. */
2964
2965rtx
35cb5232 2966get_last_nonnote_insn (void)
70545de4 2967{
06f9d6ef 2968 rtx insn = get_last_insn ();
f86e856e 2969
2970 if (insn)
2971 {
2972 if (NOTE_P (insn))
2973 for (insn = previous_insn (insn);
2974 insn && NOTE_P (insn);
2975 insn = previous_insn (insn))
2976 continue;
2977 else
2978 {
1c14a50e 2979 if (NONJUMP_INSN_P (insn)
f86e856e 2980 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2981 insn = XVECEXP (PATTERN (insn), 0,
2982 XVECLEN (PATTERN (insn), 0) - 1);
2983 }
2984 }
70545de4 2985
2986 return insn;
2987}
2988
9845d120 2989/* Return the number of actual (non-debug) insns emitted in this
2990 function. */
2991
2992int
2993get_max_insn_count (void)
2994{
2995 int n = cur_insn_uid;
2996
2997 /* The table size must be stable across -g, to avoid codegen
2998 differences due to debug insns, and not be affected by
2999 -fmin-insn-uid, to avoid excessive table size and to simplify
3000 debugging of -fcompare-debug failures. */
3001 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3002 n -= cur_debug_insn_uid;
3003 else
3004 n -= MIN_NONDEBUG_INSN_UID;
3005
3006 return n;
3007}
3008
15bbde2b 3009\f
3010/* Return the next insn. If it is a SEQUENCE, return the first insn
3011 of the sequence. */
3012
3013rtx
35cb5232 3014next_insn (rtx insn)
15bbde2b 3015{
ce4469fa 3016 if (insn)
3017 {
3018 insn = NEXT_INSN (insn);
3019 if (insn && NONJUMP_INSN_P (insn)
3020 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3021 insn = XVECEXP (PATTERN (insn), 0, 0);
3022 }
15bbde2b 3023
ce4469fa 3024 return insn;
15bbde2b 3025}
3026
3027/* Return the previous insn. If it is a SEQUENCE, return the last insn
3028 of the sequence. */
3029
3030rtx
35cb5232 3031previous_insn (rtx insn)
15bbde2b 3032{
ce4469fa 3033 if (insn)
3034 {
3035 insn = PREV_INSN (insn);
3036 if (insn && NONJUMP_INSN_P (insn)
3037 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3038 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3039 }
15bbde2b 3040
ce4469fa 3041 return insn;
15bbde2b 3042}
3043
3044/* Return the next insn after INSN that is not a NOTE. This routine does not
3045 look inside SEQUENCEs. */
3046
3047rtx
35cb5232 3048next_nonnote_insn (rtx insn)
15bbde2b 3049{
ce4469fa 3050 while (insn)
3051 {
3052 insn = NEXT_INSN (insn);
3053 if (insn == 0 || !NOTE_P (insn))
3054 break;
3055 }
15bbde2b 3056
ce4469fa 3057 return insn;
15bbde2b 3058}
3059
c4d13c5c 3060/* Return the next insn after INSN that is not a NOTE, but stop the
3061 search before we enter another basic block. This routine does not
3062 look inside SEQUENCEs. */
3063
3064rtx
3065next_nonnote_insn_bb (rtx insn)
3066{
3067 while (insn)
3068 {
3069 insn = NEXT_INSN (insn);
3070 if (insn == 0 || !NOTE_P (insn))
3071 break;
3072 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3073 return NULL_RTX;
3074 }
3075
3076 return insn;
3077}
3078
15bbde2b 3079/* Return the previous insn before INSN that is not a NOTE. This routine does
3080 not look inside SEQUENCEs. */
3081
3082rtx
35cb5232 3083prev_nonnote_insn (rtx insn)
15bbde2b 3084{
ce4469fa 3085 while (insn)
3086 {
3087 insn = PREV_INSN (insn);
3088 if (insn == 0 || !NOTE_P (insn))
3089 break;
3090 }
15bbde2b 3091
ce4469fa 3092 return insn;
15bbde2b 3093}
3094
bcc66782 3095/* Return the previous insn before INSN that is not a NOTE, but stop
3096 the search before we enter another basic block. This routine does
3097 not look inside SEQUENCEs. */
3098
3099rtx
3100prev_nonnote_insn_bb (rtx insn)
3101{
3102 while (insn)
3103 {
3104 insn = PREV_INSN (insn);
3105 if (insn == 0 || !NOTE_P (insn))
3106 break;
3107 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3108 return NULL_RTX;
3109 }
3110
3111 return insn;
3112}
3113
9845d120 3114/* Return the next insn after INSN that is not a DEBUG_INSN. This
3115 routine does not look inside SEQUENCEs. */
3116
3117rtx
3118next_nondebug_insn (rtx insn)
3119{
3120 while (insn)
3121 {
3122 insn = NEXT_INSN (insn);
3123 if (insn == 0 || !DEBUG_INSN_P (insn))
3124 break;
3125 }
3126
3127 return insn;
3128}
3129
3130/* Return the previous insn before INSN that is not a DEBUG_INSN.
3131 This routine does not look inside SEQUENCEs. */
3132
3133rtx
3134prev_nondebug_insn (rtx insn)
3135{
3136 while (insn)
3137 {
3138 insn = PREV_INSN (insn);
3139 if (insn == 0 || !DEBUG_INSN_P (insn))
3140 break;
3141 }
3142
3143 return insn;
3144}
3145
5b8537a8 3146/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3147 This routine does not look inside SEQUENCEs. */
3148
3149rtx
3150next_nonnote_nondebug_insn (rtx insn)
3151{
3152 while (insn)
3153 {
3154 insn = NEXT_INSN (insn);
3155 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3156 break;
3157 }
3158
3159 return insn;
3160}
3161
3162/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3163 This routine does not look inside SEQUENCEs. */
3164
3165rtx
3166prev_nonnote_nondebug_insn (rtx insn)
3167{
3168 while (insn)
3169 {
3170 insn = PREV_INSN (insn);
3171 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3172 break;
3173 }
3174
3175 return insn;
3176}
3177
15bbde2b 3178/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3179 or 0, if there is none. This routine does not look inside
a92771b8 3180 SEQUENCEs. */
15bbde2b 3181
3182rtx
35cb5232 3183next_real_insn (rtx insn)
15bbde2b 3184{
ce4469fa 3185 while (insn)
3186 {
3187 insn = NEXT_INSN (insn);
3188 if (insn == 0 || INSN_P (insn))
3189 break;
3190 }
15bbde2b 3191
ce4469fa 3192 return insn;
15bbde2b 3193}
3194
3195/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3196 or 0, if there is none. This routine does not look inside
3197 SEQUENCEs. */
3198
3199rtx
35cb5232 3200prev_real_insn (rtx insn)
15bbde2b 3201{
ce4469fa 3202 while (insn)
3203 {
3204 insn = PREV_INSN (insn);
3205 if (insn == 0 || INSN_P (insn))
3206 break;
3207 }
15bbde2b 3208
ce4469fa 3209 return insn;
15bbde2b 3210}
3211
d5f9786f 3212/* Return the last CALL_INSN in the current list, or 0 if there is none.
3213 This routine does not look inside SEQUENCEs. */
3214
3215rtx
35cb5232 3216last_call_insn (void)
d5f9786f 3217{
3218 rtx insn;
3219
3220 for (insn = get_last_insn ();
6d7dc5b9 3221 insn && !CALL_P (insn);
d5f9786f 3222 insn = PREV_INSN (insn))
3223 ;
3224
3225 return insn;
3226}
3227
15bbde2b 3228/* Find the next insn after INSN that really does something. This routine
084950ee 3229 does not look inside SEQUENCEs. After reload this also skips over
 3230   standalone USE and CLOBBER insns. */
15bbde2b 3231
2215ca0d 3232int
52d07779 3233active_insn_p (const_rtx insn)
2215ca0d 3234{
6d7dc5b9 3235 return (CALL_P (insn) || JUMP_P (insn)
3236 || (NONJUMP_INSN_P (insn)
3a66feab 3237 && (! reload_completed
3238 || (GET_CODE (PATTERN (insn)) != USE
3239 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3240}
3241
15bbde2b 3242rtx
35cb5232 3243next_active_insn (rtx insn)
15bbde2b 3244{
ce4469fa 3245 while (insn)
3246 {
3247 insn = NEXT_INSN (insn);
3248 if (insn == 0 || active_insn_p (insn))
3249 break;
3250 }
15bbde2b 3251
ce4469fa 3252 return insn;
15bbde2b 3253}
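
/* Illustrative sketch, not part of the original file: a pass that only
   cares about insns doing real work can chain these helpers:

       rtx insn;
       for (insn = get_insns (); insn; insn = next_active_insn (insn))
         if (active_insn_p (insn))
           process (insn);

   PROCESS is a hypothetical per-insn callback.  */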
3254
3255/* Find the last insn before INSN that really does something. This routine
084950ee 3256 does not look inside SEQUENCEs. After reload this also skips over
 3257   standalone USE and CLOBBER insns. */
15bbde2b 3258
3259rtx
35cb5232 3260prev_active_insn (rtx insn)
15bbde2b 3261{
ce4469fa 3262 while (insn)
3263 {
3264 insn = PREV_INSN (insn);
3265 if (insn == 0 || active_insn_p (insn))
3266 break;
3267 }
15bbde2b 3268
ce4469fa 3269 return insn;
15bbde2b 3270}
3271
3272/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3273
3274rtx
35cb5232 3275next_label (rtx insn)
15bbde2b 3276{
ce4469fa 3277 while (insn)
3278 {
3279 insn = NEXT_INSN (insn);
3280 if (insn == 0 || LABEL_P (insn))
3281 break;
3282 }
15bbde2b 3283
ce4469fa 3284 return insn;
15bbde2b 3285}
3286
3287/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3288
3289rtx
35cb5232 3290prev_label (rtx insn)
15bbde2b 3291{
ce4469fa 3292 while (insn)
3293 {
3294 insn = PREV_INSN (insn);
3295 if (insn == 0 || LABEL_P (insn))
3296 break;
3297 }
15bbde2b 3298
ce4469fa 3299 return insn;
15bbde2b 3300}
67c5e2a9 3301
3302/* Return the last label to mark the same position as LABEL. Return null
3303 if LABEL itself is null. */
3304
3305rtx
3306skip_consecutive_labels (rtx label)
3307{
3308 rtx insn;
3309
3310 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3311 if (LABEL_P (insn))
3312 label = insn;
3313
3314 return label;
3315}
15bbde2b 3316\f
3317#ifdef HAVE_cc0
b15e0bba 3318/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3319 and REG_CC_USER notes so we can find it. */
3320
3321void
35cb5232 3322link_cc0_insns (rtx insn)
b15e0bba 3323{
3324 rtx user = next_nonnote_insn (insn);
3325
6d7dc5b9 3326 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
b15e0bba 3327 user = XVECEXP (PATTERN (user), 0, 0);
3328
a1ddb869 3329 add_reg_note (user, REG_CC_SETTER, insn);
3330 add_reg_note (insn, REG_CC_USER, user);
b15e0bba 3331}
3332
15bbde2b 3333/* Return the next insn that uses CC0 after INSN, which is assumed to
3334 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3335 applied to the result of this function should yield INSN).
3336
3337 Normally, this is simply the next insn. However, if a REG_CC_USER note
3338 is present, it contains the insn that uses CC0.
3339
3340 Return 0 if we can't find the insn. */
3341
3342rtx
35cb5232 3343next_cc0_user (rtx insn)
15bbde2b 3344{
b572011e 3345 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3346
3347 if (note)
3348 return XEXP (note, 0);
3349
3350 insn = next_nonnote_insn (insn);
6d7dc5b9 3351 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3352 insn = XVECEXP (PATTERN (insn), 0, 0);
3353
9204e736 3354 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3355 return insn;
3356
3357 return 0;
3358}
3359
3360/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3361 note, it is the previous insn. */
3362
3363rtx
35cb5232 3364prev_cc0_setter (rtx insn)
15bbde2b 3365{
b572011e 3366 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3367
3368 if (note)
3369 return XEXP (note, 0);
3370
3371 insn = prev_nonnote_insn (insn);
611234b4 3372 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3373
3374 return insn;
3375}
3376#endif
344dc2fa 3377
698ff1f0 3378#ifdef AUTO_INC_DEC
 3379/* Find an RTX_AUTOINC class rtx which matches DATA. */
3380
3381static int
3382find_auto_inc (rtx *xp, void *data)
3383{
3384 rtx x = *xp;
225ab426 3385 rtx reg = (rtx) data;
698ff1f0 3386
3387 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3388 return 0;
3389
3390 switch (GET_CODE (x))
3391 {
3392 case PRE_DEC:
3393 case PRE_INC:
3394 case POST_DEC:
3395 case POST_INC:
3396 case PRE_MODIFY:
3397 case POST_MODIFY:
3398 if (rtx_equal_p (reg, XEXP (x, 0)))
3399 return 1;
3400 break;
3401
3402 default:
3403 gcc_unreachable ();
3404 }
3405 return -1;
3406}
3407#endif
3408
344dc2fa 3409/* Increment the label uses for all labels present in X. */
3410
3411static void
35cb5232 3412mark_label_nuses (rtx x)
344dc2fa 3413{
19cb6b50 3414 enum rtx_code code;
3415 int i, j;
3416 const char *fmt;
344dc2fa 3417
3418 code = GET_CODE (x);
a030d4a8 3419 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3420 LABEL_NUSES (XEXP (x, 0))++;
3421
3422 fmt = GET_RTX_FORMAT (code);
3423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3424 {
3425 if (fmt[i] == 'e')
ff385626 3426 mark_label_nuses (XEXP (x, i));
344dc2fa 3427 else if (fmt[i] == 'E')
ff385626 3428 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3429 mark_label_nuses (XVECEXP (x, i, j));
3430 }
3431}
3432
15bbde2b 3433\f
3434/* Try splitting insns that can be split for better scheduling.
 3435   PAT is the pattern which might be split.
3436 TRIAL is the insn providing PAT.
6ef828f9 3437 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3438
3439 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3440 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3441 returns TRIAL. If the insn to be returned can be split, it will be. */
3442
3443rtx
35cb5232 3444try_split (rtx pat, rtx trial, int last)
15bbde2b 3445{
3446 rtx before = PREV_INSN (trial);
3447 rtx after = NEXT_INSN (trial);
15bbde2b 3448 int has_barrier = 0;
1e5b92fa 3449 rtx note, seq, tem;
3cd757b1 3450 int probability;
e13693ec 3451 rtx insn_last, insn;
3452 int njumps = 0;
3cd757b1 3453
25e880b1 3454 /* We're not good at redistributing frame information. */
3455 if (RTX_FRAME_RELATED_P (trial))
3456 return trial;
3457
3cd757b1 3458 if (any_condjump_p (trial)
3459 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3460 split_branch_probability = INTVAL (XEXP (note, 0));
3461 probability = split_branch_probability;
3462
3463 seq = split_insns (pat, trial);
3464
3465 split_branch_probability = -1;
15bbde2b 3466
3467 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3468 We may need to handle this specially. */
6d7dc5b9 3469 if (after && BARRIER_P (after))
15bbde2b 3470 {
3471 has_barrier = 1;
3472 after = NEXT_INSN (after);
3473 }
3474
e13693ec 3475 if (!seq)
3476 return trial;
3477
3478 /* Avoid infinite loop if any insn of the result matches
3479 the original pattern. */
3480 insn_last = seq;
3481 while (1)
15bbde2b 3482 {
e13693ec 3483 if (INSN_P (insn_last)
3484 && rtx_equal_p (PATTERN (insn_last), pat))
3485 return trial;
3486 if (!NEXT_INSN (insn_last))
3487 break;
3488 insn_last = NEXT_INSN (insn_last);
3489 }
d823ba47 3490
3072d30e 3491 /* We will be adding the new sequence to the function. The splitters
3492 may have introduced invalid RTL sharing, so unshare the sequence now. */
3493 unshare_all_rtl_in_chain (seq);
3494
e13693ec 3495 /* Mark labels. */
3496 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3497 {
6d7dc5b9 3498 if (JUMP_P (insn))
e13693ec 3499 {
3500 mark_jump_label (PATTERN (insn), insn, 0);
3501 njumps++;
3502 if (probability != -1
3503 && any_condjump_p (insn)
3504 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3505 {
e13693ec 3506 /* We can preserve the REG_BR_PROB notes only if exactly
3507 one jump is created, otherwise the machine description
3508 is responsible for this step using the
3509 split_branch_probability variable. */
611234b4 3510 gcc_assert (njumps == 1);
a1ddb869 3511 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
31d3e01c 3512 }
e13693ec 3513 }
3514 }
3515
3516 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3517 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
6d7dc5b9 3518 if (CALL_P (trial))
e13693ec 3519 {
3520 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3521 if (CALL_P (insn))
e13693ec 3522 {
0bb5a6cd 3523 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3524 while (*p)
3525 p = &XEXP (*p, 1);
3526 *p = CALL_INSN_FUNCTION_USAGE (trial);
e13693ec 3527 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b922281a 3528
3529 /* Update the debug information for the CALL_INSN. */
3530 if (flag_enable_icf_debug)
3531 (*debug_hooks->copy_call_info) (trial, insn);
e13693ec 3532 }
3533 }
5262c253 3534
e13693ec 3535 /* Copy notes, particularly those related to the CFG. */
3536 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3537 {
3538 switch (REG_NOTE_KIND (note))
3539 {
3540 case REG_EH_REGION:
e38def9c 3541 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3542 break;
381eb1e7 3543
e13693ec 3544 case REG_NORETURN:
3545 case REG_SETJMP:
698ff1f0 3546 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3547 {
6d7dc5b9 3548 if (CALL_P (insn))
a1ddb869 3549 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3550 }
e13693ec 3551 break;
5bb27a4b 3552
e13693ec 3553 case REG_NON_LOCAL_GOTO:
698ff1f0 3554 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3555 {
6d7dc5b9 3556 if (JUMP_P (insn))
a1ddb869 3557 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3558 }
e13693ec 3559 break;
344dc2fa 3560
698ff1f0 3561#ifdef AUTO_INC_DEC
3562 case REG_INC:
3563 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3564 {
3565 rtx reg = XEXP (note, 0);
3566 if (!FIND_REG_INC_NOTE (insn, reg)
3567 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3568 add_reg_note (insn, REG_INC, reg);
698ff1f0 3569 }
3570 break;
3571#endif
3572
e13693ec 3573 default:
3574 break;
15bbde2b 3575 }
e13693ec 3576 }
3577
3578 /* If there are LABELs inside the split insns, increment the
3579 usage count so we don't delete the label. */
19d2fe05 3580 if (INSN_P (trial))
e13693ec 3581 {
3582 insn = insn_last;
3583 while (insn != NULL_RTX)
15bbde2b 3584 {
19d2fe05 3585 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3586 if (NONJUMP_INSN_P (insn))
e13693ec 3587 mark_label_nuses (PATTERN (insn));
15bbde2b 3588
e13693ec 3589 insn = PREV_INSN (insn);
3590 }
15bbde2b 3591 }
3592
13751393 3593 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
e13693ec 3594
3595 delete_insn (trial);
3596 if (has_barrier)
3597 emit_barrier_after (tem);
3598
3599 /* Recursively call try_split for each new insn created; by the
3600 time control returns here that insn will be fully split, so
3601 set LAST and continue from the insn after the one returned.
3602 We can't use next_active_insn here since AFTER may be a note.
3603 Ignore deleted insns, which can occur if not optimizing. */
3604 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3605 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3606 tem = try_split (PATTERN (tem), tem, 1);
3607
3608 /* Return either the first or the last insn, depending on which was
3609 requested. */
3610 return last
06f9d6ef 3611 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3612 : NEXT_INSN (before);
15bbde2b 3613}
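/* A hedged usage sketch (not from the original source): this is
   roughly how a splitting pass drives try_split.  The split happens in
   place; if no splitter matched, TRIAL is returned unchanged, which
   the caller can test for.  */
#if 0
  rtx last = try_split (PATTERN (insn), insn, 1);
  if (last != insn)
    {
      /* INSN was deleted and replaced; LAST is the final insn of the
	 (recursively fully split) replacement sequence.  */
    }
#endif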
3614\f
3615/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3616 Store PATTERN in the pattern slot. */
15bbde2b 3617
3618rtx
35cb5232 3619make_insn_raw (rtx pattern)
15bbde2b 3620{
19cb6b50 3621 rtx insn;
15bbde2b 3622
d7c47c0e 3623 insn = rtx_alloc (INSN);
15bbde2b 3624
575333f9 3625 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3626 PATTERN (insn) = pattern;
3627 INSN_CODE (insn) = -1;
fc92fa61 3628 REG_NOTES (insn) = NULL;
375c1c8a 3629 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3630 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3631
fe7f701d 3632#ifdef ENABLE_RTL_CHECKING
3633 if (insn
9204e736 3634 && INSN_P (insn)
fe7f701d 3635 && (returnjump_p (insn)
3636 || (GET_CODE (insn) == SET
3637 && SET_DEST (insn) == pc_rtx)))
3638 {
c3ceba8e 3639 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3640 debug_rtx (insn);
3641 }
3642#endif
d823ba47 3643
15bbde2b 3644 return insn;
3645}
3646
9845d120 3647/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3648
3649rtx
3650make_debug_insn_raw (rtx pattern)
3651{
3652 rtx insn;
3653
3654 insn = rtx_alloc (DEBUG_INSN);
3655 INSN_UID (insn) = cur_debug_insn_uid++;
3656 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3657 INSN_UID (insn) = cur_insn_uid++;
3658
3659 PATTERN (insn) = pattern;
3660 INSN_CODE (insn) = -1;
3661 REG_NOTES (insn) = NULL;
3662 INSN_LOCATOR (insn) = curr_insn_locator ();
3663 BLOCK_FOR_INSN (insn) = NULL;
3664
3665 return insn;
3666}
3667
31d3e01c 3668/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3669
89140b26 3670rtx
35cb5232 3671make_jump_insn_raw (rtx pattern)
15bbde2b 3672{
19cb6b50 3673 rtx insn;
15bbde2b 3674
6a84e367 3675 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3676 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3677
3678 PATTERN (insn) = pattern;
3679 INSN_CODE (insn) = -1;
fc92fa61 3680 REG_NOTES (insn) = NULL;
3681 JUMP_LABEL (insn) = NULL;
375c1c8a 3682 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3683 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3684
3685 return insn;
3686}
6e911104 3687
31d3e01c 3688/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3689
3690static rtx
35cb5232 3691make_call_insn_raw (rtx pattern)
6e911104 3692{
19cb6b50 3693 rtx insn;
6e911104 3694
3695 insn = rtx_alloc (CALL_INSN);
3696 INSN_UID (insn) = cur_insn_uid++;
3697
3698 PATTERN (insn) = pattern;
3699 INSN_CODE (insn) = -1;
6e911104 3700 REG_NOTES (insn) = NULL;
3701 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
375c1c8a 3702 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3703 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3704
3705 return insn;
3706}
15bbde2b 3707\f
3708/* Add INSN to the end of the doubly-linked list.
3709 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3710
3711void
35cb5232 3712add_insn (rtx insn)
15bbde2b 3713{
06f9d6ef 3714 PREV_INSN (insn) = get_last_insn();
15bbde2b 3715 NEXT_INSN (insn) = 0;
3716
06f9d6ef 3717 if (NULL != get_last_insn())
3718 NEXT_INSN (get_last_insn ()) = insn;
15bbde2b 3719
06f9d6ef 3720 if (NULL == get_insns ())
3721 set_first_insn (insn);
15bbde2b 3722
06f9d6ef 3723 set_last_insn (insn);
15bbde2b 3724}
3725
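/* Sketch of the invariant add_insn maintains: get_insns and
   get_last_insn bound one chain linked through NEXT_INSN/PREV_INSN, so
   a whole-function walk looks like this (the per-insn hook is
   hypothetical).  */
#if 0
  rtx insn;

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      process_insn (insn);	/* hypothetical per-insn work */
#endif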
312de84d 3726/* Add INSN into the doubly-linked list after insn AFTER. This and
3727 the next should be the only functions called to insert an insn once
f65c10c0 3728 delay slots have been filled, since only they know how to update a
312de84d 3729 SEQUENCE. */
15bbde2b 3730
3731void
3072d30e 3732add_insn_after (rtx insn, rtx after, basic_block bb)
15bbde2b 3733{
3734 rtx next = NEXT_INSN (after);
3735
611234b4 3736 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3737
15bbde2b 3738 NEXT_INSN (insn) = next;
3739 PREV_INSN (insn) = after;
3740
3741 if (next)
3742 {
3743 PREV_INSN (next) = insn;
6d7dc5b9 3744 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
15bbde2b 3745 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3746 }
06f9d6ef 3747 else if (get_last_insn () == after)
3748 set_last_insn (insn);
15bbde2b 3749 else
3750 {
0a893c29 3751 struct sequence_stack *stack = seq_stack;
15bbde2b 3752 /* Scan all pending sequences too. */
3753 for (; stack; stack = stack->next)
3754 if (after == stack->last)
398f4855 3755 {
3756 stack->last = insn;
3757 break;
3758 }
312de84d 3759
611234b4 3760 gcc_assert (stack);
15bbde2b 3761 }
3762
6d7dc5b9 3763 if (!BARRIER_P (after)
3764 && !BARRIER_P (insn)
9dda7915 3765 && (bb = BLOCK_FOR_INSN (after)))
3766 {
3767 set_block_for_insn (insn, bb);
308f9b79 3768 if (INSN_P (insn))
3072d30e 3769 df_insn_rescan (insn);
9dda7915 3770 /* Should not happen, as the first insn in the BB is always
3fb1e43b 3771 either a NOTE or a LABEL. */
5496dbfc 3772 if (BB_END (bb) == after
9dda7915 3773 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3774 && !BARRIER_P (insn)
ad4583d9 3775 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3776 BB_END (bb) = insn;
9dda7915 3777 }
3778
15bbde2b 3779 NEXT_INSN (after) = insn;
6d7dc5b9 3780 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
15bbde2b 3781 {
3782 rtx sequence = PATTERN (after);
3783 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3784 }
3785}
3786
312de84d 3787/* Add INSN into the doubly-linked list before insn BEFORE. This and
3072d30e 3788 the previous should be the only functions called to insert an insn
3789 once delay slots have been filled since only they know how to
3790 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3791 bb from before. */
312de84d 3792
3793void
3072d30e 3794add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 3795{
3796 rtx prev = PREV_INSN (before);
3797
611234b4 3798 gcc_assert (!optimize || !INSN_DELETED_P (before));
f65c10c0 3799
312de84d 3800 PREV_INSN (insn) = prev;
3801 NEXT_INSN (insn) = before;
3802
3803 if (prev)
3804 {
3805 NEXT_INSN (prev) = insn;
6d7dc5b9 3806 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
312de84d 3807 {
3808 rtx sequence = PATTERN (prev);
3809 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3810 }
3811 }
06f9d6ef 3812 else if (get_insns () == before)
3813 set_first_insn (insn);
312de84d 3814 else
3815 {
0a893c29 3816 struct sequence_stack *stack = seq_stack;
312de84d 3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (before == stack->first)
398f4855 3820 {
3821 stack->first = insn;
3822 break;
3823 }
312de84d 3824
611234b4 3825 gcc_assert (stack);
312de84d 3826 }
3827
48e1416a 3828 if (!bb
3072d30e 3829 && !BARRIER_P (before)
3830 && !BARRIER_P (insn))
3831 bb = BLOCK_FOR_INSN (before);
3832
3833 if (bb)
9dda7915 3834 {
3835 set_block_for_insn (insn, bb);
308f9b79 3836 if (INSN_P (insn))
3072d30e 3837 df_insn_rescan (insn);
611234b4 3838 /* Should not happen, as the first insn in the BB is always either
ba821eb1 3839 a NOTE or a LABEL. */
611234b4 3840 gcc_assert (BB_HEAD (bb) != insn
3841 /* Avoid clobbering of structure when creating new BB. */
3842 || BARRIER_P (insn)
ad4583d9 3843 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 3844 }
3845
312de84d 3846 PREV_INSN (before) = insn;
6d7dc5b9 3847 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
312de84d 3848 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3849}
3850
3072d30e 3851
3852/* Replace INSN with a deleted instruction note. */
3853
fc3d1695 3854void
3855set_insn_deleted (rtx insn)
3072d30e 3856{
3857 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3858 PUT_CODE (insn, NOTE);
3859 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3860}
3861
3862
7ddcf2bf 3863/* Remove an insn from its doubly-linked list. This function knows how
3864 to handle sequences. */
3865void
35cb5232 3866remove_insn (rtx insn)
7ddcf2bf 3867{
3868 rtx next = NEXT_INSN (insn);
3869 rtx prev = PREV_INSN (insn);
e4bf866d 3870 basic_block bb;
3871
3072d30e 3872 /* Later in the code, the block will be marked dirty. */
3873 df_insn_delete (NULL, INSN_UID (insn));
3874
7ddcf2bf 3875 if (prev)
3876 {
3877 NEXT_INSN (prev) = next;
6d7dc5b9 3878 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 3879 {
3880 rtx sequence = PATTERN (prev);
3881 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3882 }
3883 }
06f9d6ef 3884 else if (get_insns () == insn)
3885 {
c8f0c143 3886 if (next)
3887 PREV_INSN (next) = NULL;
06f9d6ef 3888 set_first_insn (next);
3889 }
7ddcf2bf 3890 else
3891 {
0a893c29 3892 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3893 /* Scan all pending sequences too. */
3894 for (; stack; stack = stack->next)
3895 if (insn == stack->first)
3896 {
3897 stack->first = next;
3898 break;
3899 }
3900
611234b4 3901 gcc_assert (stack);
7ddcf2bf 3902 }
3903
3904 if (next)
3905 {
3906 PREV_INSN (next) = prev;
6d7dc5b9 3907 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 3908 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3909 }
06f9d6ef 3910 else if (get_last_insn () == insn)
3911 set_last_insn (prev);
7ddcf2bf 3912 else
3913 {
0a893c29 3914 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3915 /* Scan all pending sequences too. */
3916 for (; stack; stack = stack->next)
3917 if (insn == stack->last)
3918 {
3919 stack->last = prev;
3920 break;
3921 }
3922
611234b4 3923 gcc_assert (stack);
7ddcf2bf 3924 }
6d7dc5b9 3925 if (!BARRIER_P (insn)
e4bf866d 3926 && (bb = BLOCK_FOR_INSN (insn)))
3927 {
308f9b79 3928 if (INSN_P (insn))
3072d30e 3929 df_set_bb_dirty (bb);
5496dbfc 3930 if (BB_HEAD (bb) == insn)
e4bf866d 3931 {
f4aee538 3932 /* Never ever delete the basic block note without deleting the whole
3933 basic block. */
611234b4 3934 gcc_assert (!NOTE_P (insn));
5496dbfc 3935 BB_HEAD (bb) = next;
e4bf866d 3936 }
5496dbfc 3937 if (BB_END (bb) == insn)
3938 BB_END (bb) = prev;
e4bf866d 3939 }
7ddcf2bf 3940}
3941
d5f9786f 3942/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3943
3944void
35cb5232 3945add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 3946{
611234b4 3947 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 3948
3949 /* Put the register usage information on the CALL. If there is already
3950 some usage information, put ours at the end. */
3951 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3952 {
3953 rtx link;
3954
3955 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3956 link = XEXP (link, 1))
3957 ;
3958
3959 XEXP (link, 1) = call_fusage;
3960 }
3961 else
3962 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3963}
3964
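/* Typical use, sketched under the assumption of a libcall-like call
   site: collect USEs of the argument registers in a local list with
   use_reg (from expr.c) and attach the list here.  The register
   numbers below are purely illustrative.  */
#if 0
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_reg (&call_fusage, gen_rtx_REG (SImode, 1));
  add_function_usage_to (call_insn, call_fusage);
#endif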
15bbde2b 3965/* Delete all insns made since FROM.
3966 FROM becomes the new last instruction. */
3967
3968void
35cb5232 3969delete_insns_since (rtx from)
15bbde2b 3970{
3971 if (from == 0)
06f9d6ef 3972 set_first_insn (0);
15bbde2b 3973 else
3974 NEXT_INSN (from) = 0;
06f9d6ef 3975 set_last_insn (from);
15bbde2b 3976}
3977
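/* The common pattern around delete_insns_since, sketched: remember the
   current last insn, attempt an expansion, and discard everything that
   was emitted if it fails (expand_candidate is hypothetical).  */
#if 0
  rtx last = get_last_insn ();
  rtx result = expand_candidate ();

  if (result == NULL_RTX)
    delete_insns_since (last);
#endif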
34e2ddcd 3978/* This function is deprecated, please use sequences instead.
3979
3980 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 3981 The insns to be moved are those between FROM and TO.
3982 They are moved to a new position after the insn AFTER.
3983 AFTER must not be FROM or TO or any insn in between.
3984
3985 This function does not know about SEQUENCEs and hence should not be
3986 called after delay-slot filling has been done. */
3987
3988void
35cb5232 3989reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 3990{
3991 /* Splice this bunch out of where it is now. */
3992 if (PREV_INSN (from))
3993 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3994 if (NEXT_INSN (to))
3995 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 3996 if (get_last_insn () == to)
3997 set_last_insn (PREV_INSN (from));
3998 if (get_insns () == from)
3999 set_first_insn (NEXT_INSN (to));
15bbde2b 4000
4001 /* Make the new neighbors point to it and it to them. */
4002 if (NEXT_INSN (after))
4003 PREV_INSN (NEXT_INSN (after)) = to;
4004
4005 NEXT_INSN (to) = NEXT_INSN (after);
4006 PREV_INSN (from) = after;
4007 NEXT_INSN (after) = from;
06f9d6ef 4008 if (after == get_last_insn())
4009 set_last_insn (to);
15bbde2b 4010}
4011
9dda7915 4012/* Same as function above, but take care to update BB boundaries. */
4013void
35cb5232 4014reorder_insns (rtx from, rtx to, rtx after)
9dda7915 4015{
4016 rtx prev = PREV_INSN (from);
4017 basic_block bb, bb2;
4018
4019 reorder_insns_nobb (from, to, after);
4020
6d7dc5b9 4021 if (!BARRIER_P (after)
9dda7915 4022 && (bb = BLOCK_FOR_INSN (after)))
4023 {
4024 rtx x;
3072d30e 4025 df_set_bb_dirty (bb);
d4c5e26d 4026
6d7dc5b9 4027 if (!BARRIER_P (from)
9dda7915 4028 && (bb2 = BLOCK_FOR_INSN (from)))
4029 {
5496dbfc 4030 if (BB_END (bb2) == to)
4031 BB_END (bb2) = prev;
3072d30e 4032 df_set_bb_dirty (bb2);
9dda7915 4033 }
4034
5496dbfc 4035 if (BB_END (bb) == after)
4036 BB_END (bb) = to;
9dda7915 4037
4038 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4039 if (!BARRIER_P (x))
a2bdd643 4040 df_insn_change_bb (x, bb);
9dda7915 4041 }
4042}
4043
15bbde2b 4044\f
31d3e01c 4045/* Emit insn(s) of given code and pattern
4046 at a specified place within the doubly-linked list.
15bbde2b 4047
31d3e01c 4048 All of the emit_foo global entry points accept an object
4049 X which is either an insn list or a PATTERN of a single
4050 instruction.
15bbde2b 4051
31d3e01c 4052 There are thus a few canonical ways to generate code and
4053 emit it at a specific place in the instruction stream. For
4054 example, consider the instruction named SPOT and the fact that
4055 we would like to emit some instructions before SPOT. We might
4056 do it like this:
15bbde2b 4057
31d3e01c 4058 start_sequence ();
4059 ... emit the new instructions ...
4060 insns_head = get_insns ();
4061 end_sequence ();
15bbde2b 4062
31d3e01c 4063 emit_insn_before (insns_head, SPOT);
15bbde2b 4064
31d3e01c 4065 It used to be common to generate SEQUENCE rtl instead, but that
4066 is a relic of the past which no longer occurs. The reason is that
4067 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4068 generated would almost certainly die right after it was created. */
15bbde2b 4069
31d3e01c 4070/* Make X be output before the instruction BEFORE. */
15bbde2b 4071
4072rtx
3072d30e 4073emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
15bbde2b 4074{
31d3e01c 4075 rtx last = before;
19cb6b50 4076 rtx insn;
15bbde2b 4077
611234b4 4078 gcc_assert (before);
31d3e01c 4079
4080 if (x == NULL_RTX)
4081 return last;
4082
4083 switch (GET_CODE (x))
15bbde2b 4084 {
9845d120 4085 case DEBUG_INSN:
31d3e01c 4086 case INSN:
4087 case JUMP_INSN:
4088 case CALL_INSN:
4089 case CODE_LABEL:
4090 case BARRIER:
4091 case NOTE:
4092 insn = x;
4093 while (insn)
4094 {
4095 rtx next = NEXT_INSN (insn);
3072d30e 4096 add_insn_before (insn, before, bb);
31d3e01c 4097 last = insn;
4098 insn = next;
4099 }
4100 break;
4101
4102#ifdef ENABLE_RTL_CHECKING
4103 case SEQUENCE:
611234b4 4104 gcc_unreachable ();
31d3e01c 4105 break;
4106#endif
4107
4108 default:
4109 last = make_insn_raw (x);
3072d30e 4110 add_insn_before (last, before, bb);
31d3e01c 4111 break;
15bbde2b 4112 }
4113
31d3e01c 4114 return last;
15bbde2b 4115}
4116
31d3e01c 4117/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4118 and output it before the instruction BEFORE. */
4119
4120rtx
0891f67c 4121emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4122{
d90b3d04 4123 rtx insn, last = NULL_RTX;
6e911104 4124
611234b4 4125 gcc_assert (before);
31d3e01c 4126
4127 switch (GET_CODE (x))
6e911104 4128 {
9845d120 4129 case DEBUG_INSN:
31d3e01c 4130 case INSN:
4131 case JUMP_INSN:
4132 case CALL_INSN:
4133 case CODE_LABEL:
4134 case BARRIER:
4135 case NOTE:
4136 insn = x;
4137 while (insn)
4138 {
4139 rtx next = NEXT_INSN (insn);
3072d30e 4140 add_insn_before (insn, before, NULL);
31d3e01c 4141 last = insn;
4142 insn = next;
4143 }
4144 break;
4145
4146#ifdef ENABLE_RTL_CHECKING
4147 case SEQUENCE:
611234b4 4148 gcc_unreachable ();
31d3e01c 4149 break;
4150#endif
4151
4152 default:
4153 last = make_jump_insn_raw (x);
3072d30e 4154 add_insn_before (last, before, NULL);
31d3e01c 4155 break;
6e911104 4156 }
4157
31d3e01c 4158 return last;
15bbde2b 4159}
4160
31d3e01c 4161/* Make an instruction with body X and code CALL_INSN
cd0fe062 4162 and output it before the instruction BEFORE. */
4163
4164rtx
0891f67c 4165emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4166{
d90b3d04 4167 rtx last = NULL_RTX, insn;
cd0fe062 4168
611234b4 4169 gcc_assert (before);
31d3e01c 4170
4171 switch (GET_CODE (x))
cd0fe062 4172 {
9845d120 4173 case DEBUG_INSN:
31d3e01c 4174 case INSN:
4175 case JUMP_INSN:
4176 case CALL_INSN:
4177 case CODE_LABEL:
4178 case BARRIER:
4179 case NOTE:
4180 insn = x;
4181 while (insn)
4182 {
4183 rtx next = NEXT_INSN (insn);
3072d30e 4184 add_insn_before (insn, before, NULL);
31d3e01c 4185 last = insn;
4186 insn = next;
4187 }
4188 break;
4189
4190#ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
611234b4 4192 gcc_unreachable ();
31d3e01c 4193 break;
4194#endif
4195
4196 default:
4197 last = make_call_insn_raw (x);
3072d30e 4198 add_insn_before (last, before, NULL);
31d3e01c 4199 break;
cd0fe062 4200 }
4201
31d3e01c 4202 return last;
cd0fe062 4203}
4204
9845d120 4205/* Make an instruction with body X and code DEBUG_INSN
4206 and output it before the instruction BEFORE. */
4207
4208rtx
4209emit_debug_insn_before_noloc (rtx x, rtx before)
4210{
4211 rtx last = NULL_RTX, insn;
4212
4213 gcc_assert (before);
4214
4215 switch (GET_CODE (x))
4216 {
4217 case DEBUG_INSN:
4218 case INSN:
4219 case JUMP_INSN:
4220 case CALL_INSN:
4221 case CODE_LABEL:
4222 case BARRIER:
4223 case NOTE:
4224 insn = x;
4225 while (insn)
4226 {
4227 rtx next = NEXT_INSN (insn);
4228 add_insn_before (insn, before, NULL);
4229 last = insn;
4230 insn = next;
4231 }
4232 break;
4233
4234#ifdef ENABLE_RTL_CHECKING
4235 case SEQUENCE:
4236 gcc_unreachable ();
4237 break;
4238#endif
4239
4240 default:
4241 last = make_debug_insn_raw (x);
4242 add_insn_before (last, before, NULL);
4243 break;
4244 }
4245
4246 return last;
4247}
4248
15bbde2b 4249/* Make an insn of code BARRIER
71caadc0 4250 and output it before the insn BEFORE. */
15bbde2b 4251
4252rtx
35cb5232 4253emit_barrier_before (rtx before)
15bbde2b 4254{
19cb6b50 4255 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4256
4257 INSN_UID (insn) = cur_insn_uid++;
4258
3072d30e 4259 add_insn_before (insn, before, NULL);
15bbde2b 4260 return insn;
4261}
4262
71caadc0 4263/* Emit the label LABEL before the insn BEFORE. */
4264
4265rtx
35cb5232 4266emit_label_before (rtx label, rtx before)
71caadc0 4267{
4268 /* This can be called twice for the same label as a result of the
4269 confusion that follows a syntax error! So make it harmless. */
4270 if (INSN_UID (label) == 0)
4271 {
4272 INSN_UID (label) = cur_insn_uid++;
3072d30e 4273 add_insn_before (label, before, NULL);
71caadc0 4274 }
4275
4276 return label;
4277}
4278
15bbde2b 4279/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4280
4281rtx
ad4583d9 4282emit_note_before (enum insn_note subtype, rtx before)
15bbde2b 4283{
19cb6b50 4284 rtx note = rtx_alloc (NOTE);
15bbde2b 4285 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4286 NOTE_KIND (note) = subtype;
ab87d1bc 4287 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4288 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
15bbde2b 4289
3072d30e 4290 add_insn_before (note, before, NULL);
15bbde2b 4291 return note;
4292}
4293\f
31d3e01c 4294/* Helper for emit_insn_after; handles lists of instructions
4295 efficiently. */
15bbde2b 4296
31d3e01c 4297static rtx
3072d30e 4298emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4299{
31d3e01c 4300 rtx last;
4301 rtx after_after;
3072d30e 4302 if (!bb && !BARRIER_P (after))
4303 bb = BLOCK_FOR_INSN (after);
15bbde2b 4304
3072d30e 4305 if (bb)
15bbde2b 4306 {
3072d30e 4307 df_set_bb_dirty (bb);
31d3e01c 4308 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4309 if (!BARRIER_P (last))
3072d30e 4310 {
4311 set_block_for_insn (last, bb);
4312 df_insn_rescan (last);
4313 }
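      /* The loop above stops at the insn whose NEXT_INSN is null;
	 give that final insn the same treatment here.  */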
6d7dc5b9 4314 if (!BARRIER_P (last))
3072d30e 4315 {
4316 set_block_for_insn (last, bb);
4317 df_insn_rescan (last);
4318 }
5496dbfc 4319 if (BB_END (bb) == after)
4320 BB_END (bb) = last;
15bbde2b 4321 }
4322 else
31d3e01c 4323 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4324 continue;
4325
4326 after_after = NEXT_INSN (after);
4327
4328 NEXT_INSN (after) = first;
4329 PREV_INSN (first) = after;
4330 NEXT_INSN (last) = after_after;
4331 if (after_after)
4332 PREV_INSN (after_after) = last;
4333
06f9d6ef 4334 if (after == get_last_insn())
4335 set_last_insn (last);
e1ab7874 4336
31d3e01c 4337 return last;
4338}
4339
3072d30e 4340/* Make X be output after the insn AFTER and set the block of the new
4341 insns to BB. If BB is NULL, an attempt is made to infer it from AFTER. */
31d3e01c 4342
4343rtx
3072d30e 4344emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
31d3e01c 4345{
4346 rtx last = after;
4347
611234b4 4348 gcc_assert (after);
31d3e01c 4349
4350 if (x == NULL_RTX)
4351 return last;
4352
4353 switch (GET_CODE (x))
15bbde2b 4354 {
9845d120 4355 case DEBUG_INSN:
31d3e01c 4356 case INSN:
4357 case JUMP_INSN:
4358 case CALL_INSN:
4359 case CODE_LABEL:
4360 case BARRIER:
4361 case NOTE:
3072d30e 4362 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4363 break;
4364
4365#ifdef ENABLE_RTL_CHECKING
4366 case SEQUENCE:
611234b4 4367 gcc_unreachable ();
31d3e01c 4368 break;
4369#endif
4370
4371 default:
4372 last = make_insn_raw (x);
3072d30e 4373 add_insn_after (last, after, bb);
31d3e01c 4374 break;
15bbde2b 4375 }
4376
31d3e01c 4377 return last;
15bbde2b 4378}
4379
1bea98fb 4380
31d3e01c 4381/* Make an insn of code JUMP_INSN with body X
15bbde2b 4382 and output it after the insn AFTER. */
4383
4384rtx
0891f67c 4385emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4386{
31d3e01c 4387 rtx last;
15bbde2b 4388
611234b4 4389 gcc_assert (after);
31d3e01c 4390
4391 switch (GET_CODE (x))
15bbde2b 4392 {
9845d120 4393 case DEBUG_INSN:
31d3e01c 4394 case INSN:
4395 case JUMP_INSN:
4396 case CALL_INSN:
4397 case CODE_LABEL:
4398 case BARRIER:
4399 case NOTE:
3072d30e 4400 last = emit_insn_after_1 (x, after, NULL);
31d3e01c 4401 break;
4402
4403#ifdef ENABLE_RTL_CHECKING
4404 case SEQUENCE:
611234b4 4405 gcc_unreachable ();
31d3e01c 4406 break;
4407#endif
4408
4409 default:
4410 last = make_jump_insn_raw (x);
3072d30e 4411 add_insn_after (last, after, NULL);
31d3e01c 4412 break;
15bbde2b 4413 }
4414
31d3e01c 4415 return last;
4416}
4417
4418/* Make an instruction with body X and code CALL_INSN
4419 and output it after the instruction AFTER. */
4420
4421rtx
0891f67c 4422emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4423{
4424 rtx last;
4425
611234b4 4426 gcc_assert (after);
31d3e01c 4427
4428 switch (GET_CODE (x))
4429 {
9845d120 4430 case DEBUG_INSN:
31d3e01c 4431 case INSN:
4432 case JUMP_INSN:
4433 case CALL_INSN:
4434 case CODE_LABEL:
4435 case BARRIER:
4436 case NOTE:
3072d30e 4437 last = emit_insn_after_1 (x, after, NULL);
31d3e01c 4438 break;
4439
4440#ifdef ENABLE_RTL_CHECKING
4441 case SEQUENCE:
611234b4 4442 gcc_unreachable ();
31d3e01c 4443 break;
4444#endif
4445
4446 default:
4447 last = make_call_insn_raw (x);
3072d30e 4448 add_insn_after (last, after, NULL);
31d3e01c 4449 break;
4450 }
4451
4452 return last;
15bbde2b 4453}
4454
9845d120 4455/* Make an instruction with body X and code DEBUG_INSN
4456 and output it after the instruction AFTER. */
4457
4458rtx
4459emit_debug_insn_after_noloc (rtx x, rtx after)
4460{
4461 rtx last;
4462
4463 gcc_assert (after);
4464
4465 switch (GET_CODE (x))
4466 {
4467 case DEBUG_INSN:
4468 case INSN:
4469 case JUMP_INSN:
4470 case CALL_INSN:
4471 case CODE_LABEL:
4472 case BARRIER:
4473 case NOTE:
4474 last = emit_insn_after_1 (x, after, NULL);
4475 break;
4476
4477#ifdef ENABLE_RTL_CHECKING
4478 case SEQUENCE:
4479 gcc_unreachable ();
4480 break;
4481#endif
4482
4483 default:
4484 last = make_debug_insn_raw (x);
4485 add_insn_after (last, after, NULL);
4486 break;
4487 }
4488
4489 return last;
4490}
4491
15bbde2b 4492/* Make an insn of code BARRIER
4493 and output it after the insn AFTER. */
4494
4495rtx
35cb5232 4496emit_barrier_after (rtx after)
15bbde2b 4497{
19cb6b50 4498 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4499
4500 INSN_UID (insn) = cur_insn_uid++;
4501
3072d30e 4502 add_insn_after (insn, after, NULL);
15bbde2b 4503 return insn;
4504}
4505
4506/* Emit the label LABEL after the insn AFTER. */
4507
4508rtx
35cb5232 4509emit_label_after (rtx label, rtx after)
15bbde2b 4510{
4511 /* This can be called twice for the same label
4512 as a result of the confusion that follows a syntax error!
4513 So make it harmless. */
4514 if (INSN_UID (label) == 0)
4515 {
4516 INSN_UID (label) = cur_insn_uid++;
3072d30e 4517 add_insn_after (label, after, NULL);
15bbde2b 4518 }
4519
4520 return label;
4521}
4522
4523/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4524
4525rtx
ad4583d9 4526emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4527{
19cb6b50 4528 rtx note = rtx_alloc (NOTE);
15bbde2b 4529 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4530 NOTE_KIND (note) = subtype;
ab87d1bc 4531 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4532 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3072d30e 4533 add_insn_after (note, after, NULL);
15bbde2b 4534 return note;
4535}
15bbde2b 4536\f
0891f67c 4537/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4538rtx
35cb5232 4539emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4540{
3072d30e 4541 rtx last = emit_insn_after_noloc (pattern, after, NULL);
d321a68b 4542
0891f67c 4543 if (pattern == NULL_RTX || !loc)
ca154f3f 4544 return last;
4545
31d3e01c 4546 after = NEXT_INSN (after);
4547 while (1)
4548 {
0891f67c 4549 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4550 INSN_LOCATOR (after) = loc;
31d3e01c 4551 if (after == last)
4552 break;
4553 after = NEXT_INSN (after);
4554 }
d321a68b 4555 return last;
4556}
4557
0891f67c 4558/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4559rtx
4560emit_insn_after (rtx pattern, rtx after)
4561{
9845d120 4562 rtx prev = after;
4563
4564 while (DEBUG_INSN_P (prev))
4565 prev = PREV_INSN (prev);
4566
4567 if (INSN_P (prev))
4568 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4569 else
3072d30e 4570 return emit_insn_after_noloc (pattern, after, NULL);
0891f67c 4571}
4572
4573/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4574rtx
35cb5232 4575emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4576{
0891f67c 4577 rtx last = emit_jump_insn_after_noloc (pattern, after);
31d3e01c 4578
0891f67c 4579 if (pattern == NULL_RTX || !loc)
ca154f3f 4580 return last;
4581
31d3e01c 4582 after = NEXT_INSN (after);
4583 while (1)
4584 {
0891f67c 4585 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4586 INSN_LOCATOR (after) = loc;
31d3e01c 4587 if (after == last)
4588 break;
4589 after = NEXT_INSN (after);
4590 }
d321a68b 4591 return last;
4592}
4593
0891f67c 4594/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4595rtx
4596emit_jump_insn_after (rtx pattern, rtx after)
4597{
9845d120 4598 rtx prev = after;
4599
4600 while (DEBUG_INSN_P (prev))
4601 prev = PREV_INSN (prev);
4602
4603 if (INSN_P (prev))
4604 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4605 else
4606 return emit_jump_insn_after_noloc (pattern, after);
4607}
4608
4609/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4610rtx
35cb5232 4611emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4612{
0891f67c 4613 rtx last = emit_call_insn_after_noloc (pattern, after);
31d3e01c 4614
0891f67c 4615 if (pattern == NULL_RTX || !loc)
ca154f3f 4616 return last;
4617
31d3e01c 4618 after = NEXT_INSN (after);
4619 while (1)
4620 {
0891f67c 4621 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4622 INSN_LOCATOR (after) = loc;
31d3e01c 4623 if (after == last)
4624 break;
4625 after = NEXT_INSN (after);
4626 }
d321a68b 4627 return last;
4628}
4629
0891f67c 4630/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4631rtx
4632emit_call_insn_after (rtx pattern, rtx after)
4633{
9845d120 4634 rtx prev = after;
4635
4636 while (DEBUG_INSN_P (prev))
4637 prev = PREV_INSN (prev);
4638
4639 if (INSN_P (prev))
4640 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
0891f67c 4641 else
4642 return emit_call_insn_after_noloc (pattern, after);
4643}
4644
9845d120 4645/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4646rtx
4647emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4648{
4649 rtx last = emit_debug_insn_after_noloc (pattern, after);
4650
4651 if (pattern == NULL_RTX || !loc)
4652 return last;
4653
4654 after = NEXT_INSN (after);
4655 while (1)
4656 {
4657 if (active_insn_p (after) && !INSN_LOCATOR (after))
4658 INSN_LOCATOR (after) = loc;
4659 if (after == last)
4660 break;
4661 after = NEXT_INSN (after);
4662 }
4663 return last;
4664}
4665
4666/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4667rtx
4668emit_debug_insn_after (rtx pattern, rtx after)
4669{
4670 if (INSN_P (after))
4671 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4672 else
4673 return emit_debug_insn_after_noloc (pattern, after);
4674}
4675
0891f67c 4676/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4677rtx
35cb5232 4678emit_insn_before_setloc (rtx pattern, rtx before, int loc)
d321a68b 4679{
4680 rtx first = PREV_INSN (before);
3072d30e 4681 rtx last = emit_insn_before_noloc (pattern, before, NULL);
0891f67c 4682
4683 if (pattern == NULL_RTX || !loc)
4684 return last;
4685
4486418e 4686 if (!first)
4687 first = get_insns ();
4688 else
4689 first = NEXT_INSN (first);
0891f67c 4690 while (1)
4691 {
4692 if (active_insn_p (first) && !INSN_LOCATOR (first))
4693 INSN_LOCATOR (first) = loc;
4694 if (first == last)
4695 break;
4696 first = NEXT_INSN (first);
4697 }
4698 return last;
4699}
4700
4701/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4702rtx
4703emit_insn_before (rtx pattern, rtx before)
4704{
9845d120 4705 rtx next = before;
4706
4707 while (DEBUG_INSN_P (next))
4708 next = PREV_INSN (next);
4709
4710 if (INSN_P (next))
4711 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4712 else
3072d30e 4713 return emit_insn_before_noloc (pattern, before, NULL);
0891f67c 4714}
4715
4716/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4717rtx
4718emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4719{
4720 rtx first = PREV_INSN (before);
4721 rtx last = emit_jump_insn_before_noloc (pattern, before);
4722
4723 if (pattern == NULL_RTX)
4724 return last;
4725
4726 first = NEXT_INSN (first);
4727 while (1)
4728 {
4729 if (active_insn_p (first) && !INSN_LOCATOR (first))
4730 INSN_LOCATOR (first) = loc;
4731 if (first == last)
4732 break;
4733 first = NEXT_INSN (first);
4734 }
4735 return last;
4736}
4737
4738/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4739rtx
4740emit_jump_insn_before (rtx pattern, rtx before)
4741{
9845d120 4742 rtx next = before;
4743
4744 while (DEBUG_INSN_P (next))
4745 next = PREV_INSN (next);
4746
4747 if (INSN_P (next))
4748 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4749 else
4750 return emit_jump_insn_before_noloc (pattern, before);
4751}
4752
4753/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4754rtx
4755emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4756{
4757 rtx first = PREV_INSN (before);
4758 rtx last = emit_call_insn_before_noloc (pattern, before);
d321a68b 4759
ca154f3f 4760 if (pattern == NULL_RTX)
4761 return last;
4762
31d3e01c 4763 first = NEXT_INSN (first);
4764 while (1)
4765 {
0891f67c 4766 if (active_insn_p (first) && !INSN_LOCATOR (first))
13751393 4767 INSN_LOCATOR (first) = loc;
31d3e01c 4768 if (first == last)
4769 break;
4770 first = NEXT_INSN (first);
4771 }
d321a68b 4772 return last;
4773}
0891f67c 4774
4775/* Like emit_call_insn_before_noloc,
4776 but set INSN_LOCATOR according to BEFORE. */
4777rtx
4778emit_call_insn_before (rtx pattern, rtx before)
4779{
9845d120 4780 rtx next = before;
4781
4782 while (DEBUG_INSN_P (next))
4783 next = PREV_INSN (next);
4784
4785 if (INSN_P (next))
4786 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
0891f67c 4787 else
4788 return emit_call_insn_before_noloc (pattern, before);
4789}
9845d120 4790
4791/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4792rtx
4793emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4794{
4795 rtx first = PREV_INSN (before);
4796 rtx last = emit_debug_insn_before_noloc (pattern, before);
4797
4798 if (pattern == NULL_RTX)
4799 return last;
4800
4801 first = NEXT_INSN (first);
4802 while (1)
4803 {
4804 if (active_insn_p (first) && !INSN_LOCATOR (first))
4805 INSN_LOCATOR (first) = loc;
4806 if (first == last)
4807 break;
4808 first = NEXT_INSN (first);
4809 }
4810 return last;
4811}
4812
4813/* Like emit_debug_insn_before_noloc,
4814 but set INSN_LOCATOR according to BEFORE. */
4815rtx
4816emit_debug_insn_before (rtx pattern, rtx before)
4817{
4818 if (INSN_P (before))
4819 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4820 else
4821 return emit_debug_insn_before_noloc (pattern, before);
4822}
d321a68b 4823\f
31d3e01c 4824/* Take X and emit it at the end of the doubly-linked
4825 INSN list.
15bbde2b 4826
4827 Returns the last insn emitted. */
4828
4829rtx
35cb5232 4830emit_insn (rtx x)
15bbde2b 4831{
06f9d6ef 4832 rtx last = get_last_insn();
31d3e01c 4833 rtx insn;
15bbde2b 4834
31d3e01c 4835 if (x == NULL_RTX)
4836 return last;
15bbde2b 4837
31d3e01c 4838 switch (GET_CODE (x))
4839 {
9845d120 4840 case DEBUG_INSN:
31d3e01c 4841 case INSN:
4842 case JUMP_INSN:
4843 case CALL_INSN:
4844 case CODE_LABEL:
4845 case BARRIER:
4846 case NOTE:
4847 insn = x;
4848 while (insn)
15bbde2b 4849 {
31d3e01c 4850 rtx next = NEXT_INSN (insn);
15bbde2b 4851 add_insn (insn);
31d3e01c 4852 last = insn;
4853 insn = next;
15bbde2b 4854 }
31d3e01c 4855 break;
15bbde2b 4856
31d3e01c 4857#ifdef ENABLE_RTL_CHECKING
4858 case SEQUENCE:
611234b4 4859 gcc_unreachable ();
31d3e01c 4860 break;
4861#endif
15bbde2b 4862
31d3e01c 4863 default:
4864 last = make_insn_raw (x);
4865 add_insn (last);
4866 break;
15bbde2b 4867 }
4868
4869 return last;
4870}
4871
9845d120 4872/* Make an insn of code DEBUG_INSN with pattern X
4873 and add it to the end of the doubly-linked list. */
4874
4875rtx
4876emit_debug_insn (rtx x)
4877{
06f9d6ef 4878 rtx last = get_last_insn();
9845d120 4879 rtx insn;
4880
4881 if (x == NULL_RTX)
4882 return last;
4883
4884 switch (GET_CODE (x))
4885 {
4886 case DEBUG_INSN:
4887 case INSN:
4888 case JUMP_INSN:
4889 case CALL_INSN:
4890 case CODE_LABEL:
4891 case BARRIER:
4892 case NOTE:
4893 insn = x;
4894 while (insn)
4895 {
4896 rtx next = NEXT_INSN (insn);
4897 add_insn (insn);
4898 last = insn;
4899 insn = next;
4900 }
4901 break;
4902
4903#ifdef ENABLE_RTL_CHECKING
4904 case SEQUENCE:
4905 gcc_unreachable ();
4906 break;
4907#endif
4908
4909 default:
4910 last = make_debug_insn_raw (x);
4911 add_insn (last);
4912 break;
4913 }
4914
4915 return last;
4916}
4917
31d3e01c 4918/* Make an insn of code JUMP_INSN with pattern X
4919 and add it to the end of the doubly-linked list. */
15bbde2b 4920
4921rtx
35cb5232 4922emit_jump_insn (rtx x)
15bbde2b 4923{
d90b3d04 4924 rtx last = NULL_RTX, insn;
15bbde2b 4925
31d3e01c 4926 switch (GET_CODE (x))
15bbde2b 4927 {
9845d120 4928 case DEBUG_INSN:
31d3e01c 4929 case INSN:
4930 case JUMP_INSN:
4931 case CALL_INSN:
4932 case CODE_LABEL:
4933 case BARRIER:
4934 case NOTE:
4935 insn = x;
4936 while (insn)
4937 {
4938 rtx next = NEXT_INSN (insn);
4939 add_insn (insn);
4940 last = insn;
4941 insn = next;
4942 }
4943 break;
b36b07d8 4944
31d3e01c 4945#ifdef ENABLE_RTL_CHECKING
4946 case SEQUENCE:
611234b4 4947 gcc_unreachable ();
31d3e01c 4948 break;
4949#endif
b36b07d8 4950
31d3e01c 4951 default:
4952 last = make_jump_insn_raw (x);
4953 add_insn (last);
4954 break;
9dda7915 4955 }
b36b07d8 4956
4957 return last;
4958}
4959
31d3e01c 4960/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4961 and add it to the end of the doubly-linked list. */
4962
4963rtx
35cb5232 4964emit_call_insn (rtx x)
15bbde2b 4965{
31d3e01c 4966 rtx insn;
4967
4968 switch (GET_CODE (x))
15bbde2b 4969 {
9845d120 4970 case DEBUG_INSN:
31d3e01c 4971 case INSN:
4972 case JUMP_INSN:
4973 case CALL_INSN:
4974 case CODE_LABEL:
4975 case BARRIER:
4976 case NOTE:
4977 insn = emit_insn (x);
4978 break;
15bbde2b 4979
31d3e01c 4980#ifdef ENABLE_RTL_CHECKING
4981 case SEQUENCE:
611234b4 4982 gcc_unreachable ();
31d3e01c 4983 break;
4984#endif
15bbde2b 4985
31d3e01c 4986 default:
4987 insn = make_call_insn_raw (x);
15bbde2b 4988 add_insn (insn);
31d3e01c 4989 break;
15bbde2b 4990 }
31d3e01c 4991
4992 return insn;
15bbde2b 4993}
4994
4995/* Add the label LABEL to the end of the doubly-linked list. */
4996
4997rtx
35cb5232 4998emit_label (rtx label)
15bbde2b 4999{
5000 /* This can be called twice for the same label
5001 as a result of the confusion that follows a syntax error!
5002 So make it harmless. */
5003 if (INSN_UID (label) == 0)
5004 {
5005 INSN_UID (label) = cur_insn_uid++;
5006 add_insn (label);
5007 }
5008 return label;
5009}
5010
5011/* Make an insn of code BARRIER
5012 and add it to the end of the doubly-linked list. */
5013
5014rtx
35cb5232 5015emit_barrier (void)
15bbde2b 5016{
19cb6b50 5017 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 5018 INSN_UID (barrier) = cur_insn_uid++;
5019 add_insn (barrier);
5020 return barrier;
5021}
5022
2f57e3d9 5023/* Emit a copy of note ORIG. */
35cb5232 5024
2f57e3d9 5025rtx
5026emit_note_copy (rtx orig)
5027{
5028 rtx note;
48e1416a 5029
2f57e3d9 5030 note = rtx_alloc (NOTE);
48e1416a 5031
2f57e3d9 5032 INSN_UID (note) = cur_insn_uid++;
5033 NOTE_DATA (note) = NOTE_DATA (orig);
ad4583d9 5034 NOTE_KIND (note) = NOTE_KIND (orig);
2f57e3d9 5035 BLOCK_FOR_INSN (note) = NULL;
5036 add_insn (note);
48e1416a 5037
31b97e8f 5038 return note;
15bbde2b 5039}
5040
31b97e8f 5041/* Make an insn of code NOTE with kind KIND
5042 and add it to the end of the doubly-linked list. */
15bbde2b 5043
5044rtx
ad4583d9 5045emit_note (enum insn_note kind)
15bbde2b 5046{
19cb6b50 5047 rtx note;
15bbde2b 5048
15bbde2b 5049 note = rtx_alloc (NOTE);
5050 INSN_UID (note) = cur_insn_uid++;
ad4583d9 5051 NOTE_KIND (note) = kind;
6c7786cb 5052 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ab87d1bc 5053 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 5054 add_insn (note);
5055 return note;
5056}
5057
18b42941 5058/* Emit a clobber of lvalue X. */
5059
5060rtx
5061emit_clobber (rtx x)
5062{
5063 /* CONCATs should not appear in the insn stream. */
5064 if (GET_CODE (x) == CONCAT)
5065 {
5066 emit_clobber (XEXP (x, 0));
5067 return emit_clobber (XEXP (x, 1));
5068 }
5069 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5070}
5071
5072/* Return a sequence of insns to clobber lvalue X. */
5073
5074rtx
5075gen_clobber (rtx x)
5076{
5077 rtx seq;
5078
5079 start_sequence ();
5080 emit_clobber (x);
5081 seq = get_insns ();
5082 end_sequence ();
5083 return seq;
5084}
5085
5086/* Emit a use of rvalue X. */
5087
5088rtx
5089emit_use (rtx x)
5090{
5091 /* CONCATs should not appear in the insn stream. */
5092 if (GET_CODE (x) == CONCAT)
5093 {
5094 emit_use (XEXP (x, 0));
5095 return emit_use (XEXP (x, 1));
5096 }
5097 return emit_insn (gen_rtx_USE (VOIDmode, x));
5098}
5099
5100/* Return a sequence of insns to use rvalue X. */
5101
5102rtx
5103gen_use (rtx x)
5104{
5105 rtx seq;
5106
5107 start_sequence ();
5108 emit_use (x);
5109 seq = get_insns ();
5110 end_sequence ();
5111 return seq;
5112}
5113
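/* Usage sketch: because gen_clobber and gen_use build their insns in a
   detached sequence, the result can be spliced in anywhere later, e.g.
   (REG and INSN being whatever the caller has in hand):  */
#if 0
  emit_insn_before (gen_use (reg), insn);
#endif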
15bbde2b 5114/* Cause the next statement to emit a line note even if the line number
bccd9980 5115 has not changed. */
15bbde2b 5116
5117void
35cb5232 5118force_next_line_note (void)
15bbde2b 5119{
7bd3dcc4 5120 last_location = -1;
15bbde2b 5121}
f1934a33 5122
5123/* Place a note of KIND on insn INSN with DATUM as the datum. If a
6312a35e 5124 note of this type already exists, its datum is replaced instead. */
f1934a33 5125
c080d8f0 5126rtx
35cb5232 5127set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 5128{
5129 rtx note = find_reg_note (insn, kind, NULL_RTX);
5130
7e6224ab 5131 switch (kind)
5132 {
5133 case REG_EQUAL:
5134 case REG_EQUIV:
5135 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5136 has multiple sets (some callers assume single_set
5137 means the insn only has one set, when in fact it
5138 means the insn only has one * useful * set). */
5139 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5140 {
611234b4 5141 gcc_assert (!note);
7e6224ab 5142 return NULL_RTX;
5143 }
5144
5145 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5146 It serves no useful purpose and breaks eliminate_regs. */
5147 if (GET_CODE (datum) == ASM_OPERANDS)
5148 return NULL_RTX;
3072d30e 5149
5150 if (note)
5151 {
5152 XEXP (note, 0) = datum;
5153 df_notes_rescan (insn);
5154 return note;
5155 }
7e6224ab 5156 break;
5157
5158 default:
3072d30e 5159 if (note)
5160 {
5161 XEXP (note, 0) = datum;
5162 return note;
5163 }
7e6224ab 5164 break;
5165 }
c080d8f0 5166
a1ddb869 5167 add_reg_note (insn, kind, datum);
3072d30e 5168
5169 switch (kind)
c080d8f0 5170 {
3072d30e 5171 case REG_EQUAL:
5172 case REG_EQUIV:
5173 df_notes_rescan (insn);
5174 break;
5175 default:
5176 break;
c080d8f0 5177 }
f1934a33 5178
c080d8f0 5179 return REG_NOTES (insn);
f1934a33 5180}
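/* Usage sketch: record that the value set by INSN is known to equal a
   constant.  If INSN already carries a REG_EQUAL note, its datum is
   replaced rather than a second note being added.  */
#if 0
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
#endif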
15bbde2b 5181\f
5182/* Return an indication of which type of insn should have X as a body.
5183 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5184
9b69f75b 5185static enum rtx_code
35cb5232 5186classify_insn (rtx x)
15bbde2b 5187{
6d7dc5b9 5188 if (LABEL_P (x))
15bbde2b 5189 return CODE_LABEL;
5190 if (GET_CODE (x) == CALL)
5191 return CALL_INSN;
5192 if (GET_CODE (x) == RETURN)
5193 return JUMP_INSN;
5194 if (GET_CODE (x) == SET)
5195 {
5196 if (SET_DEST (x) == pc_rtx)
5197 return JUMP_INSN;
5198 else if (GET_CODE (SET_SRC (x)) == CALL)
5199 return CALL_INSN;
5200 else
5201 return INSN;
5202 }
5203 if (GET_CODE (x) == PARALLEL)
5204 {
19cb6b50 5205 int j;
15bbde2b 5206 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5207 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5208 return CALL_INSN;
5209 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5210 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5211 return JUMP_INSN;
5212 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5213 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5214 return CALL_INSN;
5215 }
5216 return INSN;
5217}
5218
5219/* Emit the rtl pattern X as an appropriate kind of insn.
5220 If X is a label, it is simply added into the insn chain. */
5221
5222rtx
35cb5232 5223emit (rtx x)
15bbde2b 5224{
5225 enum rtx_code code = classify_insn (x);
5226
611234b4 5227 switch (code)
15bbde2b 5228 {
611234b4 5229 case CODE_LABEL:
5230 return emit_label (x);
5231 case INSN:
5232 return emit_insn (x);
5233 case JUMP_INSN:
5234 {
5235 rtx insn = emit_jump_insn (x);
5236 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5237 return emit_barrier ();
5238 return insn;
5239 }
5240 case CALL_INSN:
5241 return emit_call_insn (x);
9845d120 5242 case DEBUG_INSN:
5243 return emit_debug_insn (x);
611234b4 5244 default:
5245 gcc_unreachable ();
15bbde2b 5246 }
15bbde2b 5247}
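/* A sketch of the dispatch above: classify_insn works from the pattern
   alone, so handing emit a bare SET of the PC yields a JUMP_INSN, and
   for an unconditional jump emit also appends (and returns) a barrier.
   LABEL is assumed to be an existing CODE_LABEL.  */
#if 0
  emit (gen_rtx_SET (VOIDmode, pc_rtx,
		     gen_rtx_LABEL_REF (VOIDmode, label)));
#endif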
5248\f
1f3233d1 5249/* Space for free sequence stack entries. */
7035b2ab 5250static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5251
735f4358 5252/* Begin emitting insns to a sequence. If this sequence will contain
5253 something that might cause the compiler to pop arguments to function
5254 calls (because those pops have previously been deferred; see
5255 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5256 before calling this function. That will ensure that the deferred
5257 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5258
5259void
35cb5232 5260start_sequence (void)
15bbde2b 5261{
5262 struct sequence_stack *tem;
5263
1f3233d1 5264 if (free_sequence_stack != NULL)
5265 {
5266 tem = free_sequence_stack;
5267 free_sequence_stack = tem->next;
5268 }
5269 else
ba72912a 5270 tem = ggc_alloc_sequence_stack ();
15bbde2b 5271
0a893c29 5272 tem->next = seq_stack;
06f9d6ef 5273 tem->first = get_insns ();
5274 tem->last = get_last_insn ();
15bbde2b 5275
0a893c29 5276 seq_stack = tem;
15bbde2b 5277
06f9d6ef 5278 set_first_insn (0);
5279 set_last_insn (0);
15bbde2b 5280}
5281
b49854c6 5282/* Set up the insn chain starting with FIRST as the current sequence,
5283 saving the previously current one. See the documentation for
5284 start_sequence for more information about how to use this function. */
15bbde2b 5285
5286void
35cb5232 5287push_to_sequence (rtx first)
15bbde2b 5288{
5289 rtx last;
5290
5291 start_sequence ();
5292
5293 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5294
06f9d6ef 5295 set_first_insn (first);
5296 set_last_insn (last);
15bbde2b 5297}
5298
28bf151d 5299/* Like push_to_sequence, but take the last insn as an argument to avoid
5300 looping through the list. */
5301
5302void
5303push_to_sequence2 (rtx first, rtx last)
5304{
5305 start_sequence ();
5306
06f9d6ef 5307 set_first_insn (first);
5308 set_last_insn (last);
28bf151d 5309}
5310
ab74c92f 5311/* Set up the outer-level insn chain
5312 as the current sequence, saving the previously current one. */
5313
5314void
35cb5232 5315push_topmost_sequence (void)
ab74c92f 5316{
2041cfd9 5317 struct sequence_stack *stack, *top = NULL;
ab74c92f 5318
5319 start_sequence ();
5320
0a893c29 5321 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5322 top = stack;
5323
06f9d6ef 5324 set_first_insn (top->first);
5325 set_last_insn (top->last);
ab74c92f 5326}
5327
5328/* After emitting to the outer-level insn chain, update the outer-level
5329 insn chain, and restore the previous saved state. */
5330
5331void
35cb5232 5332pop_topmost_sequence (void)
ab74c92f 5333{
2041cfd9 5334 struct sequence_stack *stack, *top = NULL;
ab74c92f 5335
0a893c29 5336 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5337 top = stack;
5338
06f9d6ef 5339 top->first = get_insns ();
5340 top->last = get_last_insn ();
ab74c92f 5341
5342 end_sequence ();
5343}
5344
15bbde2b 5345/* After emitting to a sequence, restore the previously saved state.
5346
b49854c6 5347 To get the contents of the sequence just made, you must call
31d3e01c 5348 `get_insns' *before* calling here.
b49854c6 5349
5350 If the compiler might have deferred popping arguments while
5351 generating this sequence, and this sequence will not be immediately
5352 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5353 before calling get_insns. That will ensure that the deferred
b49854c6 5354 pops are inserted into this sequence, and not into some random
5355 location in the instruction stream. See INHIBIT_DEFER_POP for more
5356 information about deferred popping of arguments. */
15bbde2b 5357
5358void
35cb5232 5359end_sequence (void)
15bbde2b 5360{
0a893c29 5361 struct sequence_stack *tem = seq_stack;
15bbde2b 5362
06f9d6ef 5363 set_first_insn (tem->first);
5364 set_last_insn (tem->last);
0a893c29 5365 seq_stack = tem->next;
15bbde2b 5366
1f3233d1 5367 memset (tem, 0, sizeof (*tem));
5368 tem->next = free_sequence_stack;
5369 free_sequence_stack = tem;
15bbde2b 5370}
5371
5372/* Return 1 if currently emitting into a sequence. */
5373
5374int
35cb5232 5375in_sequence_p (void)
15bbde2b 5376{
0a893c29 5377 return seq_stack != 0;
15bbde2b 5378}
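/* The canonical sequence idiom, sketched (TARGET, SOURCE and SPOT are
   whatever the caller has in hand); this is the same pattern described
   in the comment before emit_insn_before_noloc above.  */
#if 0
  rtx seq;

  start_sequence ();
  emit_move_insn (target, source);	/* ... emit into the sequence ...  */
  seq = get_insns ();
  end_sequence ();

  emit_insn_before (seq, spot);
#endif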
15bbde2b 5379\f
02ebfa52 5380/* Put the various virtual registers into REGNO_REG_RTX. */
5381
2f3874ce 5382static void
b079a207 5383init_virtual_regs (void)
02ebfa52 5384{
b079a207 5385 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5386 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5387 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5388 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5389 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
0a893c29 5390}
5391
928d57e3 5392\f
5393/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5394static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5395static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5396static int copy_insn_n_scratches;
5397
5398/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5399 copied an ASM_OPERANDS.
5400 In that case, it is the original input-operand vector. */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0))
          && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
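
/* An illustrative sketch (not part of the compiler proper) of the two-step
   use described in the comment above copy_insn_1: copy an insn's pattern
   with copy_insn, then run copy_insn_1 over its EXPR_LIST REG_NOTEs so that
   any SCRATCHes they mention map to the same copies used in the new
   pattern.  This is the same idiom emit_copy_of_insn_after uses near the
   end of this file; the helper name below is hypothetical.

     static rtx
     copy_pattern_and_notes (rtx insn, rtx after)
     {
       rtx new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);
       rtx link;

       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
         if (GET_CODE (link) == EXPR_LIST)
           add_reg_note (new_insn, REG_NOTE_KIND (link),
                         copy_insn_1 (XEXP (link, 0)));
       return new_insn;
     }  */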

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
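
/* For example (a sketch, assuming the target supports V4SImode): building
   a vector whose elements are all const0_rtx through gen_rtx_CONST_VECTOR
   returns the shared zero vector instead of allocating a fresh
   CONST_VECTOR, so the result may be compared against CONST0_RTX with
   pointer equality.

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                 == CONST0_RTX (V4SImode));  */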

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, memory attribute,
     and register attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
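
/* A consequence of the const_int_rtx table set up above (a sketch, stated
   as assertions rather than compiler code): CONST_INTs in the range
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are unique shared objects,
   so pointer comparison against const0_rtx and const1_rtx is valid for
   them.

     gcc_assert (GEN_INT (0) == const0_rtx);
     gcc_assert (GEN_INT (1) == const1_rtx);  */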

/* Produce an exact duplicate of insn INSN after AFTER.  Take care to
   update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
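
/* A usage sketch (illustrative only; INSN and BRANCH are assumed to be
   insns already in the chain): a pass that needs a duplicate of INSN
   placed immediately before BRANCH could write

     rtx copy = emit_copy_of_insn_after (insn, PREV_INSN (branch));

   The copy shares no structure with INSN except where sharing is safe
   (see copy_insn_1) and receives a fresh INSN_UID, but inherits the
   pattern, the surviving REG_NOTES, the locator, and the frame-related
   bit.  */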

static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER of hard register REGNO in mode MODE, creating
   and caching it on first use.  */

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
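
/* Illustrative use (a sketch; the register number is hypothetical): a
   caller needing a CLOBBER of hard register 0 in word_mode can ask for
   the cached rtx

     rtx x = gen_hard_reg_clobber (word_mode, 0);

   Repeated calls with the same mode/register pair return the same object,
   so such clobbers can be compared with pointer equality.  */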

#include "gt-emit-rtl.h"