/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "vecprim.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function,
   in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype cannot
   handle a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset_known_p ? p->offset : 0) * 50000)
          ^ ((p->size_known_p ? p->size : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure describing a register that holds
   part of DECL starting at byte OFFSET, and insert it into the hash
   table if one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

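/* Illustrative note, not part of GCC: because gen_rtx_CONST_INT caches
   small values in const_int_rtx and hash-conses the rest, CONST_INTs
   are unique per value, so pointer equality suffices to compare them:

     rtx a = GEN_INT (42);
     rtx b = GEN_INT (42);
     gcc_assert (a == b);

   GEN_INT here is the usual shorthand for gen_rtx_CONST_INT.  */
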
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

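/* Illustrative note, not part of GCC: unlike a bare GEN_INT,
   gen_int_mode canonicalizes C for MODE by sign-extending it from the
   width of MODE.  For example,

     rtx x = gen_int_mode (0xff, QImode);

   yields (const_int -1), the canonical QImode form, whereas
   GEN_INT (0xff) would produce a non-canonical (const_int 255).  */
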
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

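/* Illustrative note, not part of GCC: for integer constants the two
   helpers above round-trip, assuming CST is a CONST_INT or an integer
   CONST_DOUBLE whose value fits mode MODE:

     double_int d = rtx_to_double_int (cst);
     rtx back = immed_double_int_const (d, mode);

   BACK is then the canonical shared constant equal to CST.  */
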
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 agree), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

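/* Illustrative worked example, not part of GCC, assuming a 64-bit
   HOST_WIDE_INT.  The three cases in immed_double_const above play
   out as:

     immed_double_const (5, 0, SImode);      case 1: (const_int 5)
     immed_double_const (-1, -1, VOIDmode);  case 2: (const_int -1)
     immed_double_const (0, 1, TImode);      case 3: a VOIDmode
                                             CONST_DOUBLE for 2**64  */
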
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

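/* Illustrative note, not part of GCC: on a target with 4-byte words,
   given REG, a pseudo of the inner mode, validate_subreg accepts and
   rejects as follows:

     validate_subreg (SImode, DImode, reg, 0);  true: lowpart word
     validate_subreg (SImode, DImode, reg, 4);  true: the other word
     validate_subreg (DImode, SImode, reg, 0);  true: paradoxical
     validate_subreg (QImode, SImode, reg, 1);  false: not a lowpart
     validate_subreg (DImode, SImode, reg, 4);  false: offset must
                                                be 0 here  */
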
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}


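/* Illustrative note, not part of GCC: gen_lowpart_SUBREG picks the
   lowpart offset for the target's byte order automatically, e.g.

     gen_lowpart_SUBREG (SImode, gen_reg_rtx (DImode));

   yields (subreg:SI (reg:DI N) 0) on little-endian targets and
   (subreg:SI (reg:DI N) 4) on big-endian targets with 4-byte words.  */
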
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

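/* Illustrative note, not part of GCC: a typical use of gen_rtvec is
   building the body of a PARALLEL, e.g.

     rtx set = gen_rtx_SET (VOIDmode, dest, src);
     rtx clob = gen_rtx_CLOBBER (VOIDmode, scratch);
     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clob));

   where DEST, SRC and SCRATCH stand for rtxen the caller already holds.  */
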
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
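
/* Illustrative worked example, not part of GCC: for QImode within
   SImode, byte_lowpart_offset (QImode, SImode) is 0 on little-endian
   targets and 3 on big-endian targets, since the least significant
   byte sits at opposite ends of the 4-byte value.  In the paradoxical
   direction, byte_lowpart_offset (SImode, QImode) is correspondingly
   0 or -3.  */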

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

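/* Illustrative note, not part of GCC: during expansion, when
   generating_concat_p is set, asking for a complex pseudo such as

     rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF N) (reg:DF N+1)) rather than a single
   DCmode register, letting the allocator place the real and imaginary
   halves independently.  */
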
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_P (x) || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP as a separate operand,
   in case EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

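/* Illustrative worked example, not part of GCC: for SImode within
   DImode on a target with 4-byte words, difference = 8 - 4 = 4, so

     subreg_lowpart_offset (SImode, DImode)  == 0 (little-endian)
                                                4 (big-endian)
     subreg_highpart_offset (SImode, DImode) == 4 (little-endian)
                                                0 (big-endian)

   i.e. the low and high parts swap ends with the byte order.  */
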
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
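
/* Illustrative note, not part of GCC: a paradoxical subreg is one
   whose outer mode is wider than its inner mode, e.g.

     (subreg:DI (reg:SI N) 0)

   for which paradoxical_subreg_p returns true; the bits beyond SImode
   in the DImode view are in general undefined.  */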

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Nowadays this function can mostly be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

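/* Illustrative note, not part of GCC: on a little-endian target with
   4-byte words,

     operand_subword (op, 0, 0, DImode);

   where OP is a DImode pseudo yields (subreg:SI (reg:DI N) 0), the
   low word, and OFFSET 1 yields (subreg:SI (reg:DI N) 4), the high
   word; on a WORDS_BIG_ENDIAN target the two offsets trade places.  */
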
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the two MEM_EXPR expressions EXPR1 and EXPR2 can be
   considered equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
         || (MAX (MEM_ALIGN (mem),
                  MAX (align, get_object_alignment (MEM_EXPR (mem))))
             < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

310b57a1 1558/* Given REF (a MEM) and T, either the type of X or the expression
c6259b83 1559 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f717f77 1560 if we are making a new object of this type. BITPOS is nonzero if
1561 there is an offset outstanding on T that will be applied later. */
c6259b83 1562
1563void
35cb5232 1564set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1565 HOST_WIDE_INT bitpos)
c6259b83 1566{
6f717f77 1567 HOST_WIDE_INT apply_bitpos = 0;
c6259b83 1568 tree type;
d72886b5 1569 struct mem_attrs attrs, *defattrs, *refattrs;
3f06bd1b 1570 addr_space_t as;
c6259b83 1571
1572 /* It can happen that type_for_mode was given a mode for which there
1573 is no language-level type. In which case it returns NULL, which
1574 we can see here. */
1575 if (t == NULL_TREE)
1576 return;
1577
1578 type = TYPE_P (t) ? t : TREE_TYPE (t);
4ccffa39 1579 if (type == error_mark_node)
1580 return;
c6259b83 1581
c6259b83 1582 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1583 wrong answer, as it assumes that DECL_RTL already has the right alias
1584 info. Callers should not set DECL_RTL until after the call to
1585 set_mem_attributes. */
611234b4 1586 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
c6259b83 1587
d72886b5 1588 memset (&attrs, 0, sizeof (attrs));
1589
96216d37 1590 /* Get the alias set from the expression or type (perhaps using a
2a631e19 1591 front-end routine) and use it. */
d72886b5 1592 attrs.alias = get_alias_set (t);
c6259b83 1593
fbc6244b 1594 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
8d350e69 1595 MEM_POINTER (ref) = POINTER_TYPE_P (type);
c6259b83 1596
d8dccfe9 1597 /* Default values from pre-existing memory attributes if present. */
d72886b5 1598 refattrs = MEM_ATTRS (ref);
1599 if (refattrs)
d8dccfe9 1600 {
1601 /* ??? Can this ever happen? Calling this routine on a MEM that
1602 already carries memory attributes should probably be invalid. */
d72886b5 1603 attrs.expr = refattrs->expr;
6d58bcba 1604 attrs.offset_known_p = refattrs->offset_known_p;
d72886b5 1605 attrs.offset = refattrs->offset;
6d58bcba 1606 attrs.size_known_p = refattrs->size_known_p;
d72886b5 1607 attrs.size = refattrs->size;
1608 attrs.align = refattrs->align;
d8dccfe9 1609 }
1610
1611 /* Otherwise, default values from the mode of the MEM reference. */
d72886b5 1612 else
d8dccfe9 1613 {
d72886b5 1614 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1615 gcc_assert (!defattrs->expr);
6d58bcba 1616 gcc_assert (!defattrs->offset_known_p);
d72886b5 1617
d8dccfe9 1618 /* Respect mode size. */
6d58bcba 1619 attrs.size_known_p = defattrs->size_known_p;
d72886b5 1620 attrs.size = defattrs->size;
d8dccfe9 1621 /* ??? Is this really necessary? We probably should always get
1622 the size from the type below. */
1623
1624 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1625 if T is an object, always compute the object alignment below. */
d72886b5 1626 if (TYPE_P (t))
1627 attrs.align = defattrs->align;
1628 else
1629 attrs.align = BITS_PER_UNIT;
d8dccfe9 1630 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1631 e.g. if the type carries an alignment attribute. Should we be
1632 able to simply always use TYPE_ALIGN? */
1633 }
1634
a9d9ab08 1635 /* We can set the alignment from the type if we are making an object,
1636 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
679e0056 1637 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
d72886b5 1638 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
679e0056 1639
182cf5a9 1640 else if (TREE_CODE (t) == MEM_REF)
1641 {
679e0056 1642 tree op0 = TREE_OPERAND (t, 0);
06a807aa 1643 if (TREE_CODE (op0) == ADDR_EXPR
1644 && (DECL_P (TREE_OPERAND (op0, 0))
1645 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
182cf5a9 1646 {
06a807aa 1647 if (DECL_P (TREE_OPERAND (op0, 0)))
d72886b5 1648 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
06a807aa 1649 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1650 {
d72886b5 1651 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
182cf5a9 1652#ifdef CONSTANT_ALIGNMENT
d72886b5 1653 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1654 attrs.align);
182cf5a9 1655#endif
06a807aa 1656 }
1657 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1658 {
1659 unsigned HOST_WIDE_INT ioff
1660 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1661 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
d72886b5 1662 attrs.align = MIN (aoff, attrs.align);
06a807aa 1663 }
182cf5a9 1664 }
1665 else
936dedf3 1666 /* ??? This isn't fully correct; we can't set the alignment from the
1667 type in all cases. */
d72886b5 1668 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
182cf5a9 1669 }
679e0056 1670
559c9389 1671 else if (TREE_CODE (t) == TARGET_MEM_REF)
1672 /* ??? This isn't fully correct; we can't set the alignment from the
1673 type in all cases. */
d72886b5 1674 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
559c9389 1675
96216d37 1676 /* If the size is known, we can set that. */
1677 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
6d58bcba 1678 {
1679 attrs.size_known_p = true;
1680 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1681 }
96216d37 1682
579bccf9 1683 /* If T is not a type, we may be able to deduce some more information about
1684 the expression. */
1685 if (! TYPE_P (t))
2a631e19 1686 {
ae2dd339 1687 tree base;
698537d1 1688 bool align_computed = false;
b04fab2a 1689
2a631e19 1690 if (TREE_THIS_VOLATILE (t))
1691 MEM_VOLATILE_P (ref) = 1;
c6259b83 1692
3c00f11c 1693 /* Now remove any conversions: they don't change what the underlying
1694 object is. Likewise for SAVE_EXPR. */
72dd6141 1695 while (CONVERT_EXPR_P (t)
3c00f11c 1696 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1697 || TREE_CODE (t) == SAVE_EXPR)
2a631e19 1698 t = TREE_OPERAND (t, 0);
1699
73eb0a09 1700 /* Note whether this expression can trap. */
1701 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1702
1703 base = get_base_address (t);
3f06bd1b 1704 if (base)
1705 {
1706 if (DECL_P (base)
1707 && TREE_READONLY (base)
1708 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1709 && !TREE_THIS_VOLATILE (base))
1710 MEM_READONLY_P (ref) = 1;
1711
1712 /* Mark static const strings readonly as well. */
1713 if (TREE_CODE (base) == STRING_CST
1714 && TREE_READONLY (base)
1715 && TREE_STATIC (base))
1716 MEM_READONLY_P (ref) = 1;
1717
1718 if (TREE_CODE (base) == MEM_REF
1719 || TREE_CODE (base) == TARGET_MEM_REF)
1720 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1721 0))));
1722 else
1723 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1724 }
1725 else
1726 as = TYPE_ADDR_SPACE (type);
cab98a0d 1727
2b02580f 1728 /* If this expression uses its parent's alias set, mark it such
1729 that we won't change it. */
1730 if (component_uses_parent_alias_set (t))
5cc193e7 1731 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1732
2a631e19 1733 /* If this is a decl, set the attributes of the MEM from it. */
1734 if (DECL_P (t))
1735 {
d72886b5 1736 attrs.expr = t;
6d58bcba 1737 attrs.offset_known_p = true;
1738 attrs.offset = 0;
6f717f77 1739 apply_bitpos = bitpos;
6d58bcba 1740 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1741 {
1742 attrs.size_known_p = true;
1743 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1744 }
1745 else
1746 attrs.size_known_p = false;
d72886b5 1747 attrs.align = DECL_ALIGN (t);
698537d1 1748 align_computed = true;
2a631e19 1749 }
1750
ecfe4ca9 1751 /* If this is a constant, we know the alignment. */
ce45a448 1752 else if (CONSTANT_CLASS_P (t))
42f6f447 1753 {
d72886b5 1754 attrs.align = TYPE_ALIGN (type);
42f6f447 1755#ifdef CONSTANT_ALIGNMENT
d72886b5 1756 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
42f6f447 1757#endif
698537d1 1758 align_computed = true;
42f6f447 1759 }
b10dbbca 1760
1761 /* If this is a field reference and not a bit-field, record it. */
f0b5f617 1762 /* ??? There is some information that can be gleaned from bit-fields,
b10dbbca 1763 such as the word offset in the structure that might be modified.
1764 But skip it for now. */
1765 else if (TREE_CODE (t) == COMPONENT_REF
1766 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1767 {
d72886b5 1768 attrs.expr = t;
6d58bcba 1769 attrs.offset_known_p = true;
1770 attrs.offset = 0;
6f717f77 1771 apply_bitpos = bitpos;
b10dbbca 1772 /* ??? Any reason the field size would be different than
1773 the size we got from the type? */
1774 }
1775
1776 /* If this is an array reference, look for an outer field reference. */
1777 else if (TREE_CODE (t) == ARRAY_REF)
1778 {
1779 tree off_tree = size_zero_node;
6b039979 1780 /* We can't modify t, because we use it at the end of the
1781 function. */
1782 tree t2 = t;
b10dbbca 1783
1784 do
1785 {
6b039979 1786 tree index = TREE_OPERAND (t2, 1);
6374121b 1787 tree low_bound = array_ref_low_bound (t2);
1788 tree unit_size = array_ref_element_size (t2);
97f8ce30 1789
1790 /* We assume all arrays have sizes that are a multiple of a byte.
1791 First subtract the lower bound, if any, in the type of the
6374121b 1792 index, then convert to sizetype and multiply by the size of
1793 the array element. */
1794 if (! integer_zerop (low_bound))
faa43f85 1795 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1796 index, low_bound);
97f8ce30 1797
6374121b 1798 off_tree = size_binop (PLUS_EXPR,
535664e3 1799 size_binop (MULT_EXPR,
1800 fold_convert (sizetype,
1801 index),
6374121b 1802 unit_size),
1803 off_tree);
6b039979 1804 t2 = TREE_OPERAND (t2, 0);
b10dbbca 1805 }
6b039979 1806 while (TREE_CODE (t2) == ARRAY_REF);
b10dbbca 1807
6b039979 1808 if (DECL_P (t2))
2d8fe5d0 1809 {
d72886b5 1810 attrs.expr = t2;
6d58bcba 1811 attrs.offset_known_p = false;
2d8fe5d0 1812 if (host_integerp (off_tree, 1))
0318dc09 1813 {
1814 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1815 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
d72886b5 1816 attrs.align = DECL_ALIGN (t2);
1817 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1818 attrs.align = aoff;
698537d1 1819 align_computed = true;
6d58bcba 1820 attrs.offset_known_p = true;
1821 attrs.offset = ioff;
6f717f77 1822 apply_bitpos = bitpos;
0318dc09 1823 }
2d8fe5d0 1824 }
6b039979 1825 else if (TREE_CODE (t2) == COMPONENT_REF)
b10dbbca 1826 {
d72886b5 1827 attrs.expr = t2;
6d58bcba 1828 attrs.offset_known_p = false;
b10dbbca 1829 if (host_integerp (off_tree, 1))
6f717f77 1830 {
6d58bcba 1831 attrs.offset_known_p = true;
1832 attrs.offset = tree_low_cst (off_tree, 1);
6f717f77 1833 apply_bitpos = bitpos;
1834 }
b10dbbca 1835 /* ??? Any reason the field size would be different than
1836 the size we got from the type? */
1837 }
2d8fe5d0 1838 }
1839
6d72287b 1840 /* If this is an indirect reference, record it. */
182cf5a9 1841 else if (TREE_CODE (t) == MEM_REF
5d9de213 1842 || TREE_CODE (t) == TARGET_MEM_REF)
6d72287b 1843 {
d72886b5 1844 attrs.expr = t;
6d58bcba 1845 attrs.offset_known_p = true;
1846 attrs.offset = 0;
6d72287b 1847 apply_bitpos = bitpos;
1848 }
1849
957d0361 1850 if (!align_computed)
698537d1 1851 {
957d0361 1852 unsigned int obj_align = get_object_alignment (t);
d72886b5 1853 attrs.align = MAX (attrs.align, obj_align);
698537d1 1854 }
2a631e19 1855 }
3f06bd1b 1856 else
1857 as = TYPE_ADDR_SPACE (type);
2a631e19 1858
e2e205b3 1859 /* If we modified OFFSET based on T, then subtract the outstanding
595f1461 1860 bit position offset. Similarly, increase the size of the accessed
1861 object to contain the negative offset. */
6f717f77 1862 if (apply_bitpos)
595f1461 1863 {
6d58bcba 1864 gcc_assert (attrs.offset_known_p);
1865 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1866 if (attrs.size_known_p)
1867 attrs.size += apply_bitpos / BITS_PER_UNIT;
595f1461 1868 }
6f717f77 1869
2a631e19 1870 /* Now set the attributes we computed above. */
3f06bd1b 1871 attrs.addrspace = as;
d72886b5 1872 set_mem_attrs (ref, &attrs);
c6259b83 1873}
1874
6f717f77 1875void
35cb5232 1876set_mem_attributes (rtx ref, tree t, int objectp)
6f717f77 1877{
1878 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1879}
1880
c6259b83 1881/* Set the alias set of MEM to SET. */
1882
1883void
32c2fdea 1884set_mem_alias_set (rtx mem, alias_set_type set)
c6259b83 1885{
d72886b5 1886 struct mem_attrs attrs;
1887
c6259b83 1888 /* If the new and old alias sets don't conflict, something is wrong. */
1b4345f7 1889 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
d72886b5 1890 attrs = *get_mem_attrs (mem);
1891 attrs.alias = set;
1892 set_mem_attrs (mem, &attrs);
bd1a81f7 1893}
1894
1895/* Set the address space of MEM to ADDRSPACE (target-defined). */
1896
1897void
1898set_mem_addr_space (rtx mem, addr_space_t addrspace)
1899{
d72886b5 1900 struct mem_attrs attrs;
1901
1902 attrs = *get_mem_attrs (mem);
1903 attrs.addrspace = addrspace;
1904 set_mem_attrs (mem, &attrs);
c6259b83 1905}
96216d37 1906
1c4512da 1907/* Set the alignment of MEM to ALIGN bits. */
96216d37 1908
1909void
35cb5232 1910set_mem_align (rtx mem, unsigned int align)
96216d37 1911{
d72886b5 1912 struct mem_attrs attrs;
1913
1914 attrs = *get_mem_attrs (mem);
1915 attrs.align = align;
1916 set_mem_attrs (mem, &attrs);
96216d37 1917}
278fe152 1918
b10dbbca 1919/* Set the expr for MEM to EXPR. */
278fe152 1920
1921void
35cb5232 1922set_mem_expr (rtx mem, tree expr)
278fe152 1923{
d72886b5 1924 struct mem_attrs attrs;
1925
1926 attrs = *get_mem_attrs (mem);
1927 attrs.expr = expr;
1928 set_mem_attrs (mem, &attrs);
278fe152 1929}
b10dbbca 1930
1931/* Set the offset of MEM to OFFSET. */
1932
1933void
da443c27 1934set_mem_offset (rtx mem, HOST_WIDE_INT offset)
b10dbbca 1935{
d72886b5 1936 struct mem_attrs attrs;
1937
1938 attrs = *get_mem_attrs (mem);
6d58bcba 1939 attrs.offset_known_p = true;
1940 attrs.offset = offset;
da443c27 1941 set_mem_attrs (mem, &attrs);
1942}
1943
1944/* Clear the offset of MEM. */
1945
1946void
1947clear_mem_offset (rtx mem)
1948{
1949 struct mem_attrs attrs;
1950
1951 attrs = *get_mem_attrs (mem);
6d58bcba 1952 attrs.offset_known_p = false;
d72886b5 1953 set_mem_attrs (mem, &attrs);
f0500469 1954}
1955
1956/* Set the size of MEM to SIZE. */
1957
1958void
5b2a69fa 1959set_mem_size (rtx mem, HOST_WIDE_INT size)
f0500469 1960{
d72886b5 1961 struct mem_attrs attrs;
1962
1963 attrs = *get_mem_attrs (mem);
6d58bcba 1964 attrs.size_known_p = true;
1965 attrs.size = size;
5b2a69fa 1966 set_mem_attrs (mem, &attrs);
1967}
1968
1969/* Clear the size of MEM. */
1970
1971void
1972clear_mem_size (rtx mem)
1973{
1974 struct mem_attrs attrs;
1975
1976 attrs = *get_mem_attrs (mem);
6d58bcba 1977 attrs.size_known_p = false;
d72886b5 1978 set_mem_attrs (mem, &attrs);
b10dbbca 1979}
c6259b83 1980\f
96216d37 1981/* Return a memory reference like MEMREF, but with its mode changed to MODE
1982 and its address changed to ADDR. (VOIDmode means don't change the mode.
1983 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1984 returned memory location is required to be valid. The memory
1985 attributes are not changed. */
15bbde2b 1986
96216d37 1987static rtx
35cb5232 1988change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
15bbde2b 1989{
bd1a81f7 1990 addr_space_t as;
9ce37fa7 1991 rtx new_rtx;
15bbde2b 1992
611234b4 1993 gcc_assert (MEM_P (memref));
bd1a81f7 1994 as = MEM_ADDR_SPACE (memref);
15bbde2b 1995 if (mode == VOIDmode)
1996 mode = GET_MODE (memref);
1997 if (addr == 0)
1998 addr = XEXP (memref, 0);
3988ef8b 1999 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
bd1a81f7 2000 && (!validate || memory_address_addr_space_p (mode, addr, as)))
3988ef8b 2001 return memref;
15bbde2b 2002
e4e86ec5 2003 if (validate)
15bbde2b 2004 {
e4e86ec5 2005 if (reload_in_progress || reload_completed)
bd1a81f7 2006 gcc_assert (memory_address_addr_space_p (mode, addr, as));
e4e86ec5 2007 else
bd1a81f7 2008 addr = memory_address_addr_space (mode, addr, as);
15bbde2b 2009 }
d823ba47 2010
e8976cd7 2011 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2012 return memref;
2013
9ce37fa7 2014 new_rtx = gen_rtx_MEM (mode, addr);
2015 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2016 return new_rtx;
15bbde2b 2017}
537ffcfc 2018
96216d37 2019/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2020 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2021
2022rtx
35cb5232 2023change_address (rtx memref, enum machine_mode mode, rtx addr)
e513d163 2024{
d72886b5 2025 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
9ce37fa7 2026 enum machine_mode mmode = GET_MODE (new_rtx);
d72886b5 2027 struct mem_attrs attrs, *defattrs;
0ab96142 2028
d72886b5 2029 attrs = *get_mem_attrs (memref);
2030 defattrs = mode_mem_attrs[(int) mmode];
6d58bcba 2031 attrs.expr = NULL_TREE;
2032 attrs.offset_known_p = false;
2033 attrs.size_known_p = defattrs->size_known_p;
d72886b5 2034 attrs.size = defattrs->size;
2035 attrs.align = defattrs->align;
6cc60c4d 2036
d28edf0d 2037 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2038 if (new_rtx == memref)
0ab96142 2039 {
d72886b5 2040 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
9ce37fa7 2041 return new_rtx;
0ab96142 2042
9ce37fa7 2043 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2044 MEM_COPY_ATTRIBUTES (new_rtx, memref);
0ab96142 2045 }
d28edf0d 2046
d72886b5 2047 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2048 return new_rtx;
e513d163 2049}
537ffcfc 2050
96216d37 2051/* Return a memory reference like MEMREF, but with its mode changed
2052 to MODE and its address offset by OFFSET bytes. If VALIDATE is
bf42c62d 2053 nonzero, the memory address is forced to be valid.
2054 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2055 and the caller is responsible for adjusting the MEMREF base register. */
e4e86ec5 2056
2057rtx
35cb5232 2058adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2059 int validate, int adjust)
e4e86ec5 2060{
fb257ae6 2061 rtx addr = XEXP (memref, 0);
9ce37fa7 2062 rtx new_rtx;
d72886b5 2063 enum machine_mode address_mode;
cfb75cdf 2064 int pbits;
d72886b5 2065 struct mem_attrs attrs, *defattrs;
2066 unsigned HOST_WIDE_INT max_align;
2067
2068 attrs = *get_mem_attrs (memref);
fb257ae6 2069
d28edf0d 2070 /* If there are no changes, just return the original memory reference. */
2071 if (mode == GET_MODE (memref) && !offset
d72886b5 2072 && (!validate || memory_address_addr_space_p (mode, addr,
2073 attrs.addrspace)))
d28edf0d 2074 return memref;
2075
e36c3d58 2076 /* ??? Prefer to create garbage instead of creating shared rtl.
6ef828f9 2077 This may happen even if offset is nonzero -- consider
e36c3d58 2078 (plus (plus reg reg) const_int) -- so do this always. */
2079 addr = copy_rtx (addr);
2080
cfb75cdf 2081 /* Convert a possibly large offset to a signed value within the
2082 range of the target address space. */
87cf5753 2083 address_mode = get_address_mode (memref);
98155838 2084 pbits = GET_MODE_BITSIZE (address_mode);
cfb75cdf 2085 if (HOST_BITS_PER_WIDE_INT > pbits)
2086 {
2087 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2088 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2089 >> shift);
2090 }
2091
cd358719 2092 if (adjust)
2093 {
2094 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2095 object, we can merge it into the LO_SUM. */
2096 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2097 && offset >= 0
2098 && (unsigned HOST_WIDE_INT) offset
2099 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
98155838 2100 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
29c05e22 2101 plus_constant (address_mode,
2102 XEXP (addr, 1), offset));
cd358719 2103 else
29c05e22 2104 addr = plus_constant (address_mode, addr, offset);
cd358719 2105 }
fb257ae6 2106
9ce37fa7 2107 new_rtx = change_address_1 (memref, mode, addr, validate);
96216d37 2108
e077413c 2109 /* If the address is a REG, change_address_1 rightfully returns memref,
2110 but this would destroy memref's MEM_ATTRS. */
2111 if (new_rtx == memref && offset != 0)
2112 new_rtx = copy_rtx (new_rtx);
2113
96216d37 2114 /* Compute the new values of the memory attributes due to this adjustment.
2115 We add the offsets and update the alignment. */
6d58bcba 2116 if (attrs.offset_known_p)
2117 attrs.offset += offset;
96216d37 2118
b8098e5b 2119 /* Compute the new alignment by taking the MIN of the alignment and the
2120 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2121 is zero. */
2122 if (offset != 0)
d72886b5 2123 {
2124 max_align = (offset & -offset) * BITS_PER_UNIT;
2125 attrs.align = MIN (attrs.align, max_align);
2126 }
96216d37 2127
5cc193e7 2128 /* We can compute the size in a number of ways. */
d72886b5 2129 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
6d58bcba 2130 if (defattrs->size_known_p)
2131 {
2132 attrs.size_known_p = true;
2133 attrs.size = defattrs->size;
2134 }
2135 else if (attrs.size_known_p)
2136 attrs.size -= offset;
5cc193e7 2137
d72886b5 2138 set_mem_attrs (new_rtx, &attrs);
96216d37 2139
2140 /* At some point, we should validate that this offset is within the object,
2141 if all the appropriate values are known. */
9ce37fa7 2142 return new_rtx;
e4e86ec5 2143}
2144
bf42c62d 2145/* Return a memory reference like MEMREF, but with its mode changed
2146 to MODE and its address changed to ADDR, which is assumed to be
f0b5f617 2147 MEMREF offset by OFFSET bytes. If VALIDATE is
bf42c62d 2148 nonzero, the memory address is forced to be valid. */
2149
2150rtx
35cb5232 2151adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2152 HOST_WIDE_INT offset, int validate)
bf42c62d 2153{
2154 memref = change_address_1 (memref, VOIDmode, addr, validate);
2155 return adjust_address_1 (memref, mode, offset, validate, 0);
2156}
2157
2a631e19 2158/* Return a memory reference like MEMREF, but whose address is changed by
2159 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2160 known to be in OFFSET (possibly 1). */
fcdc122e 2161
2162rtx
35cb5232 2163offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
fcdc122e 2164{
9ce37fa7 2165 rtx new_rtx, addr = XEXP (memref, 0);
d72886b5 2166 enum machine_mode address_mode;
6d58bcba 2167 struct mem_attrs attrs, *defattrs;
fac6aae6 2168
d72886b5 2169 attrs = *get_mem_attrs (memref);
87cf5753 2170 address_mode = get_address_mode (memref);
98155838 2171 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2172
d4c5e26d 2173 /* At this point we don't know _why_ the address is invalid. It
917bbcab 2174 could have secondary memory references, multiplies or anything.
fac6aae6 2175
2176 However, if we did go and rearrange things, we can wind up not
2177 being able to recognize the magic around pic_offset_table_rtx.
2178 This stuff is fragile, and is yet another example of why it is
2179 bad to expose PIC machinery too early. */
d72886b5 2180 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2181 attrs.addrspace)
fac6aae6 2182 && GET_CODE (addr) == PLUS
2183 && XEXP (addr, 0) == pic_offset_table_rtx)
2184 {
2185 addr = force_reg (GET_MODE (addr), addr);
98155838 2186 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
fac6aae6 2187 }
2188
9ce37fa7 2189 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2190 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
fcdc122e 2191
d28edf0d 2192 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2193 if (new_rtx == memref)
2194 return new_rtx;
d28edf0d 2195
fcdc122e 2196 /* Update the alignment to reflect the offset. Reset the offset, which
2197 we don't know. */
6d58bcba 2198 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2199 attrs.offset_known_p = false;
2200 attrs.size_known_p = defattrs->size_known_p;
2201 attrs.size = defattrs->size;
d72886b5 2202 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2203 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2204 return new_rtx;
fcdc122e 2205}
d4c5e26d 2206
537ffcfc 2207/* Return a memory reference like MEMREF, but with its address changed to
2208 ADDR. The caller is asserting that the actual piece of memory pointed
2209 to is the same, just the form of the address is being changed, such as
2210 by putting something into a register. */
2211
2212rtx
35cb5232 2213replace_equiv_address (rtx memref, rtx addr)
537ffcfc 2214{
96216d37 2215 /* change_address_1 copies the memory attribute structure without change
2216 and that's exactly what we want here. */
ecfe4ca9 2217 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2218 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2219}
96216d37 2220
e4e86ec5 2221/* Likewise, but the reference is not required to be valid. */
2222
2223rtx
35cb5232 2224replace_equiv_address_nv (rtx memref, rtx addr)
e4e86ec5 2225{
e4e86ec5 2226 return change_address_1 (memref, VOIDmode, addr, 0);
2227}
8259ab07 2228
2229/* Return a memory reference like MEMREF, but with its mode widened to
2230 MODE and offset by OFFSET. This would be used by targets that e.g.
2231 cannot issue QImode memory operations and have to use SImode memory
2232 operations plus masking logic. */
2233
2234rtx
35cb5232 2235widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
8259ab07 2236{
9ce37fa7 2237 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
d72886b5 2238 struct mem_attrs attrs;
8259ab07 2239 unsigned int size = GET_MODE_SIZE (mode);
2240
d28edf0d 2241 /* If there are no changes, just return the original memory reference. */
9ce37fa7 2242 if (new_rtx == memref)
2243 return new_rtx;
d28edf0d 2244
d72886b5 2245 attrs = *get_mem_attrs (new_rtx);
2246
8259ab07 2247 /* If we don't know what offset we were at within the expression, then
2248 we can't know if we've overstepped the bounds. */
6d58bcba 2249 if (! attrs.offset_known_p)
d72886b5 2250 attrs.expr = NULL_TREE;
8259ab07 2251
d72886b5 2252 while (attrs.expr)
8259ab07 2253 {
d72886b5 2254 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
8259ab07 2255 {
d72886b5 2256 tree field = TREE_OPERAND (attrs.expr, 1);
2257 tree offset = component_ref_field_offset (attrs.expr);
8259ab07 2258
2259 if (! DECL_SIZE_UNIT (field))
2260 {
d72886b5 2261 attrs.expr = NULL_TREE;
8259ab07 2262 break;
2263 }
2264
2265 /* Is the field at least as large as the access? If so, ok,
2266 otherwise strip back to the containing structure. */
8359cfb4 2267 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2268 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
6d58bcba 2269 && attrs.offset >= 0)
8259ab07 2270 break;
2271
6374121b 2272 if (! host_integerp (offset, 1))
8259ab07 2273 {
d72886b5 2274 attrs.expr = NULL_TREE;
8259ab07 2275 break;
2276 }
2277
d72886b5 2278 attrs.expr = TREE_OPERAND (attrs.expr, 0);
6d58bcba 2279 attrs.offset += tree_low_cst (offset, 1);
2280 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2281 / BITS_PER_UNIT);
8259ab07 2282 }
2283 /* Similarly for the decl. */
d72886b5 2284 else if (DECL_P (attrs.expr)
2285 && DECL_SIZE_UNIT (attrs.expr)
2286 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2287 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
6d58bcba 2288 && (! attrs.offset_known_p || attrs.offset >= 0))
8259ab07 2289 break;
2290 else
2291 {
2292 /* The widened memory access overflows the expression, which means
2293 that it could alias another expression. Zap it. */
d72886b5 2294 attrs.expr = NULL_TREE;
8259ab07 2295 break;
2296 }
2297 }
2298
d72886b5 2299 if (! attrs.expr)
6d58bcba 2300 attrs.offset_known_p = false;
8259ab07 2301
2302 /* The widened memory may alias other stuff, so zap the alias set. */
2303 /* ??? Maybe use get_alias_set on any remaining expression. */
d72886b5 2304 attrs.alias = 0;
6d58bcba 2305 attrs.size_known_p = true;
2306 attrs.size = size;
d72886b5 2307 set_mem_attrs (new_rtx, &attrs);
9ce37fa7 2308 return new_rtx;
8259ab07 2309}
15bbde2b 2310\f
ac681e84 2311/* A fake decl that is used as the MEM_EXPR of spill slots. */
2312static GTY(()) tree spill_slot_decl;
2313
58029e61 2314tree
2315get_spill_slot_decl (bool force_build_p)
ac681e84 2316{
2317 tree d = spill_slot_decl;
2318 rtx rd;
d72886b5 2319 struct mem_attrs attrs;
ac681e84 2320
58029e61 2321 if (d || !force_build_p)
ac681e84 2322 return d;
2323
e60a6f7b 2324 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2325 VAR_DECL, get_identifier ("%sfp"), void_type_node);
ac681e84 2326 DECL_ARTIFICIAL (d) = 1;
2327 DECL_IGNORED_P (d) = 1;
2328 TREE_USED (d) = 1;
ac681e84 2329 spill_slot_decl = d;
2330
2331 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2332 MEM_NOTRAP_P (rd) = 1;
d72886b5 2333 attrs = *mode_mem_attrs[(int) BLKmode];
2334 attrs.alias = new_alias_set ();
2335 attrs.expr = d;
2336 set_mem_attrs (rd, &attrs);
ac681e84 2337 SET_DECL_RTL (d, rd);
2338
2339 return d;
2340}
2341
2342/* Given MEM, a result from assign_stack_local, fill in the memory
2343 attributes as appropriate for a register allocator spill slot.
2344 These slots are not aliasable by other memory. We arrange for
2345 them all to use a single MEM_EXPR, so that the aliasing code can
2346 work properly in the case of shared spill slots. */
2347
2348void
2349set_mem_attrs_for_spill (rtx mem)
2350{
d72886b5 2351 struct mem_attrs attrs;
2352 rtx addr;
ac681e84 2353
d72886b5 2354 attrs = *get_mem_attrs (mem);
2355 attrs.expr = get_spill_slot_decl (true);
2356 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2357 attrs.addrspace = ADDR_SPACE_GENERIC;
ac681e84 2358
2359 /* We expect the incoming memory to be of the form:
2360 (mem:MODE (plus (reg sfp) (const_int offset)))
2361 with perhaps the plus missing for offset = 0. */
2362 addr = XEXP (mem, 0);
6d58bcba 2363 attrs.offset_known_p = true;
2364 attrs.offset = 0;
ac681e84 2365 if (GET_CODE (addr) == PLUS
971ba038 2366 && CONST_INT_P (XEXP (addr, 1)))
6d58bcba 2367 attrs.offset = INTVAL (XEXP (addr, 1));
ac681e84 2368
d72886b5 2369 set_mem_attrs (mem, &attrs);
ac681e84 2370 MEM_NOTRAP_P (mem) = 1;
2371}
2372\f
15bbde2b 2373/* Return a newly created CODE_LABEL rtx with a unique label number. */
2374
2375rtx
35cb5232 2376gen_label_rtx (void)
15bbde2b 2377{
a7ae1e59 2378 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
35cb5232 2379 NULL, label_num++, NULL);
15bbde2b 2380}
2381\f
2382/* For procedure integration. */
2383
15bbde2b 2384/* Install new pointers to the first and last insns in the chain.
d4c332ff 2385 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2386 Used for an inline-procedure after copying the insn chain. */
2387
2388void
35cb5232 2389set_new_first_and_last_insn (rtx first, rtx last)
15bbde2b 2390{
d4c332ff 2391 rtx insn;
2392
06f9d6ef 2393 set_first_insn (first);
2394 set_last_insn (last);
d4c332ff 2395 cur_insn_uid = 0;
2396
9845d120 2397 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2398 {
2399 int debug_count = 0;
2400
2401 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2402 cur_debug_insn_uid = 0;
2403
2404 for (insn = first; insn; insn = NEXT_INSN (insn))
2405 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2406 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2407 else
2408 {
2409 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2410 if (DEBUG_INSN_P (insn))
2411 debug_count++;
2412 }
2413
2414 if (debug_count)
2415 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2416 else
2417 cur_debug_insn_uid++;
2418 }
2419 else
2420 for (insn = first; insn; insn = NEXT_INSN (insn))
2421 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
d4c332ff 2422
2423 cur_insn_uid++;
15bbde2b 2424}
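/* A hypothetical worked example: with MIN_NONDEBUG_INSN_UID == 100, a
   chain holding debug insns with uids 3 and 7 plus ordinary insns with
   uids 104 and 110 yields cur_debug_insn_uid == 8 (one past the
   highest debug uid seen) and cur_insn_uid == 111.  Debug insns whose
   uids strayed above the boundary would instead be counted and the
   debug uid counter placed just above MIN_NONDEBUG_INSN_UID.  */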
15bbde2b 2425\f
d823ba47 2426/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2427 structure. This routine should only be called once. */
15bbde2b 2428
a40c0eeb 2429static void
df329266 2430unshare_all_rtl_1 (rtx insn)
15bbde2b 2431{
2d96a59a 2432 /* Unshare just about everything else. */
1cd4cfea 2433 unshare_all_rtl_in_chain (insn);
d823ba47 2434
15bbde2b 2435 /* Make sure the addresses of stack slots found outside the insn chain
2436 (such as, in DECL_RTL of a variable) are not shared
2437 with the insn chain.
2438
2439 This special care is necessary when the stack slot MEM does not
2440 actually appear in the insn chain. If it does appear, its address
2441 is unshared from all else at that point. */
45733446 2442 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
15bbde2b 2443}
2444
d823ba47 2445/* Go through all the RTL insn bodies and copy any invalid shared
2d96a59a 2446 structure, again. This is a fairly expensive thing to do so it
2447 should be done sparingly. */
2448
2449void
35cb5232 2450unshare_all_rtl_again (rtx insn)
2d96a59a 2451{
2452 rtx p;
5244079b 2453 tree decl;
2454
2d96a59a 2455 for (p = insn; p; p = NEXT_INSN (p))
9204e736 2456 if (INSN_P (p))
2d96a59a 2457 {
2458 reset_used_flags (PATTERN (p));
2459 reset_used_flags (REG_NOTES (p));
6d2a4bac 2460 if (CALL_P (p))
2461 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2d96a59a 2462 }
5244079b 2463
01dc9f0c 2464 /* Make sure that virtual stack slots are not shared. */
265be050 2465 set_used_decls (DECL_INITIAL (cfun->decl));
01dc9f0c 2466
5244079b 2467 /* Make sure that virtual parameters are not shared. */
1767a056 2468 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
265be050 2469 set_used_flags (DECL_RTL (decl));
5244079b 2470
2471 reset_used_flags (stack_slot_list);
2472
df329266 2473 unshare_all_rtl_1 (insn);
a40c0eeb 2474}
2475
2a1990e9 2476unsigned int
a40c0eeb 2477unshare_all_rtl (void)
2478{
df329266 2479 unshare_all_rtl_1 (get_insns ());
2a1990e9 2480 return 0;
2d96a59a 2481}
2482
77fce4cd 2483
1cd4cfea 2484/* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2485 Recursively does the same for subexpressions. */
2486
2487static void
2488verify_rtx_sharing (rtx orig, rtx insn)
2489{
2490 rtx x = orig;
2491 int i;
2492 enum rtx_code code;
2493 const char *format_ptr;
2494
2495 if (x == 0)
2496 return;
2497
2498 code = GET_CODE (x);
2499
2500 /* These types may be freely shared. */
2501
2502 switch (code)
2503 {
2504 case REG:
688ff29b 2505 case DEBUG_EXPR:
2506 case VALUE:
0349edce 2507 CASE_CONST_ANY:
1cd4cfea 2508 case SYMBOL_REF:
2509 case LABEL_REF:
2510 case CODE_LABEL:
2511 case PC:
2512 case CC0:
1a860023 2513 case RETURN:
9cb2517e 2514 case SIMPLE_RETURN:
1cd4cfea 2515 case SCRATCH:
1cd4cfea 2516 return;
c09425a0 2517 /* SCRATCHes must be shared because they represent distinct values. */
2518 case CLOBBER:
2519 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2520 return;
2521 break;
1cd4cfea 2522
2523 case CONST:
3072d30e 2524 if (shared_const_p (orig))
1cd4cfea 2525 return;
2526 break;
2527
2528 case MEM:
2529 /* A MEM is allowed to be shared if its address is constant. */
2530 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2531 || reload_completed || reload_in_progress)
2532 return;
2533
2534 break;
2535
2536 default:
2537 break;
2538 }
2539
2540 /* This rtx may not be shared. If it has already been seen,
2541 replace it with a copy of itself. */
9cee7c3f 2542#ifdef ENABLE_CHECKING
1cd4cfea 2543 if (RTX_FLAG (x, used))
2544 {
0a81f5a0 2545 error ("invalid rtl sharing found in the insn");
1cd4cfea 2546 debug_rtx (insn);
0a81f5a0 2547 error ("shared rtx");
1cd4cfea 2548 debug_rtx (x);
0a81f5a0 2549 internal_error ("internal consistency failure");
1cd4cfea 2550 }
9cee7c3f 2551#endif
2552 gcc_assert (!RTX_FLAG (x, used));
48e1416a 2553
1cd4cfea 2554 RTX_FLAG (x, used) = 1;
2555
8b332087 2556 /* Now scan the subexpressions recursively. */
1cd4cfea 2557
2558 format_ptr = GET_RTX_FORMAT (code);
2559
2560 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2561 {
2562 switch (*format_ptr++)
2563 {
2564 case 'e':
2565 verify_rtx_sharing (XEXP (x, i), insn);
2566 break;
2567
2568 case 'E':
2569 if (XVEC (x, i) != NULL)
2570 {
2571 int j;
2572 int len = XVECLEN (x, i);
2573
2574 for (j = 0; j < len; j++)
2575 {
9cee7c3f 2576 /* We allow sharing of ASM_OPERANDS inside a single
2577 instruction. */
1cd4cfea 2578 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
9cee7c3f 2579 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2580 == ASM_OPERANDS))
1cd4cfea 2581 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2582 else
2583 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2584 }
2585 }
2586 break;
2587 }
2588 }
2589 return;
2590}
2591
c7bf1374 2592/* Go through all the RTL insn bodies and check that there is no unexpected
1cd4cfea 2593 sharing between the subexpressions. */
2594
4b987fac 2595DEBUG_FUNCTION void
1cd4cfea 2596verify_rtl_sharing (void)
2597{
2598 rtx p;
2599
4b366dd3 2600 timevar_push (TV_VERIFY_RTL_SHARING);
2601
1cd4cfea 2602 for (p = get_insns (); p; p = NEXT_INSN (p))
2603 if (INSN_P (p))
2604 {
2605 reset_used_flags (PATTERN (p));
2606 reset_used_flags (REG_NOTES (p));
6d2a4bac 2607 if (CALL_P (p))
2608 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
764f640f 2609 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2610 {
2611 int i;
2612 rtx q, sequence = PATTERN (p);
2613
2614 for (i = 0; i < XVECLEN (sequence, 0); i++)
2615 {
2616 q = XVECEXP (sequence, 0, i);
2617 gcc_assert (INSN_P (q));
2618 reset_used_flags (PATTERN (q));
2619 reset_used_flags (REG_NOTES (q));
6d2a4bac 2620 if (CALL_P (q))
2621 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
764f640f 2622 }
2623 }
1cd4cfea 2624 }
2625
2626 for (p = get_insns (); p; p = NEXT_INSN (p))
2627 if (INSN_P (p))
2628 {
2629 verify_rtx_sharing (PATTERN (p), p);
2630 verify_rtx_sharing (REG_NOTES (p), p);
6d2a4bac 2631 if (CALL_P (p))
2632 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
1cd4cfea 2633 }
4b366dd3 2634
2635 timevar_pop (TV_VERIFY_RTL_SHARING);
1cd4cfea 2636}
2637
2d96a59a 2638/* Go through all the RTL insn bodies and copy any invalid shared structure.
2639 Assumes the mark bits are cleared at entry. */
2640
1cd4cfea 2641void
2642unshare_all_rtl_in_chain (rtx insn)
2d96a59a 2643{
2644 for (; insn; insn = NEXT_INSN (insn))
9204e736 2645 if (INSN_P (insn))
2d96a59a 2646 {
2647 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2648 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
6d2a4bac 2649 if (CALL_P (insn))
2650 CALL_INSN_FUNCTION_USAGE (insn)
2651 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2d96a59a 2652 }
2653}
2654
01dc9f0c 2655/* Go through all virtual stack slots of a function and mark them as
265be050 2656 shared. We never replace the DECL_RTLs themselves with a copy,
2657 but expressions mentioned into a DECL_RTL cannot be shared with
2658 expressions in the instruction stream.
2659
2660 Note that reload may convert pseudo registers into memories in-place.
2661 Pseudo registers are always shared, but MEMs never are. Thus if we
2662 reset the used flags on MEMs in the instruction stream, we must set
2663 them again on MEMs that appear in DECL_RTLs. */
2664
01dc9f0c 2665static void
265be050 2666set_used_decls (tree blk)
01dc9f0c 2667{
2668 tree t;
2669
2670 /* Mark decls. */
1767a056 2671 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
0e8e37b2 2672 if (DECL_RTL_SET_P (t))
265be050 2673 set_used_flags (DECL_RTL (t));
01dc9f0c 2674
2675 /* Now process sub-blocks. */
93110716 2676 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
265be050 2677 set_used_decls (t);
01dc9f0c 2678}
2679
15bbde2b 2680/* Mark ORIG as in use, and return a copy of it if it was already in use.
7ba6ce7a 2681 Recursively does the same for subexpressions. Uses
2682 copy_rtx_if_shared_1 to reduce stack space. */
15bbde2b 2683
2684rtx
35cb5232 2685copy_rtx_if_shared (rtx orig)
15bbde2b 2686{
0e0727c4 2687 copy_rtx_if_shared_1 (&orig);
2688 return orig;
2689}
2690
7ba6ce7a 2691/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2692 use. Recursively does the same for subexpressions. */
2693
0e0727c4 2694static void
2695copy_rtx_if_shared_1 (rtx *orig1)
2696{
2697 rtx x;
19cb6b50 2698 int i;
2699 enum rtx_code code;
0e0727c4 2700 rtx *last_ptr;
19cb6b50 2701 const char *format_ptr;
15bbde2b 2702 int copied = 0;
0e0727c4 2703 int length;
2704
2705 /* Repeat is used to turn tail-recursion into iteration. */
2706repeat:
2707 x = *orig1;
15bbde2b 2708
2709 if (x == 0)
0e0727c4 2710 return;
15bbde2b 2711
2712 code = GET_CODE (x);
2713
2714 /* These types may be freely shared. */
2715
2716 switch (code)
2717 {
2718 case REG:
688ff29b 2719 case DEBUG_EXPR:
2720 case VALUE:
0349edce 2721 CASE_CONST_ANY:
15bbde2b 2722 case SYMBOL_REF:
1cd4cfea 2723 case LABEL_REF:
15bbde2b 2724 case CODE_LABEL:
2725 case PC:
2726 case CC0:
e0691b9a 2727 case RETURN:
9cb2517e 2728 case SIMPLE_RETURN:
15bbde2b 2729 case SCRATCH:
a92771b8 2730 /* SCRATCHes must be shared because they represent distinct values. */
0e0727c4 2731 return;
c09425a0 2732 case CLOBBER:
2733 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2734 return;
2735 break;
15bbde2b 2736
f63d12e3 2737 case CONST:
3072d30e 2738 if (shared_const_p (x))
0e0727c4 2739 return;
f63d12e3 2740 break;
2741
9845d120 2742 case DEBUG_INSN:
15bbde2b 2743 case INSN:
2744 case JUMP_INSN:
2745 case CALL_INSN:
2746 case NOTE:
15bbde2b 2747 case BARRIER:
2748 /* The chain of insns is not being copied. */
0e0727c4 2749 return;
15bbde2b 2750
0dbd1c74 2751 default:
2752 break;
15bbde2b 2753 }
2754
2755 /* This rtx may not be shared. If it has already been seen,
2756 replace it with a copy of itself. */
2757
7c25cb91 2758 if (RTX_FLAG (x, used))
15bbde2b 2759 {
f2d0e9f1 2760 x = shallow_copy_rtx (x);
15bbde2b 2761 copied = 1;
2762 }
7c25cb91 2763 RTX_FLAG (x, used) = 1;
15bbde2b 2764
2765 /* Now scan the subexpressions recursively.
2766 We can store any replaced subexpressions directly into X
2767 since we know X is not shared! Any vectors in X
2768 must be copied if X was copied. */
2769
2770 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2771 length = GET_RTX_LENGTH (code);
2772 last_ptr = NULL;
48e1416a 2773
0e0727c4 2774 for (i = 0; i < length; i++)
15bbde2b 2775 {
2776 switch (*format_ptr++)
2777 {
2778 case 'e':
0e0727c4 2779 if (last_ptr)
2780 copy_rtx_if_shared_1 (last_ptr);
2781 last_ptr = &XEXP (x, i);
15bbde2b 2782 break;
2783
2784 case 'E':
2785 if (XVEC (x, i) != NULL)
2786 {
19cb6b50 2787 int j;
ffe0869b 2788 int len = XVECLEN (x, i);
48e1416a 2789
8b332087 2790 /* Copy the vector iff I copied the rtx and the length
2791 is nonzero. */
ffe0869b 2792 if (copied && len > 0)
a4070a91 2793 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
48e1416a 2794
d632b59a 2795 /* Call recursively on everything inside the vector. */
ffe0869b 2796 for (j = 0; j < len; j++)
0e0727c4 2797 {
2798 if (last_ptr)
2799 copy_rtx_if_shared_1 (last_ptr);
2800 last_ptr = &XVECEXP (x, i, j);
2801 }
15bbde2b 2802 }
2803 break;
2804 }
2805 }
0e0727c4 2806 *orig1 = x;
2807 if (last_ptr)
2808 {
2809 orig1 = last_ptr;
2810 goto repeat;
2811 }
2812 return;
15bbde2b 2813}
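/* An illustration with a hypothetical rtx: if the same
   (plus (reg 66) (const_int 4)) object appeared inside two insn
   patterns, the first traversal would mark it used and the second
   occurrence would be replaced by a fresh shallow copy, so that a
   later in-place modification of one insn cannot corrupt the other.  */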
2814
709947e6 2815/* Set the USED bit in X and its non-shareable subparts to FLAG. */
15bbde2b 2816
709947e6 2817static void
2818mark_used_flags (rtx x, int flag)
15bbde2b 2819{
19cb6b50 2820 int i, j;
2821 enum rtx_code code;
2822 const char *format_ptr;
0e0727c4 2823 int length;
15bbde2b 2824
0e0727c4 2825 /* Repeat is used to turn tail-recursion into iteration. */
2826repeat:
15bbde2b 2827 if (x == 0)
2828 return;
2829
2830 code = GET_CODE (x);
2831
c3418f42 2832 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2833 for them. */
2834
2835 switch (code)
2836 {
2837 case REG:
688ff29b 2838 case DEBUG_EXPR:
2839 case VALUE:
0349edce 2840 CASE_CONST_ANY:
15bbde2b 2841 case SYMBOL_REF:
2842 case CODE_LABEL:
2843 case PC:
2844 case CC0:
e0691b9a 2845 case RETURN:
9cb2517e 2846 case SIMPLE_RETURN:
15bbde2b 2847 return;
2848
9845d120 2849 case DEBUG_INSN:
15bbde2b 2850 case INSN:
2851 case JUMP_INSN:
2852 case CALL_INSN:
2853 case NOTE:
2854 case LABEL_REF:
2855 case BARRIER:
2856 /* The chain of insns is not being copied. */
2857 return;
d823ba47 2858
0dbd1c74 2859 default:
2860 break;
15bbde2b 2861 }
2862
709947e6 2863 RTX_FLAG (x, used) = flag;
15bbde2b 2864
2865 format_ptr = GET_RTX_FORMAT (code);
0e0727c4 2866 length = GET_RTX_LENGTH (code);
48e1416a 2867
0e0727c4 2868 for (i = 0; i < length; i++)
15bbde2b 2869 {
2870 switch (*format_ptr++)
2871 {
2872 case 'e':
0e0727c4 2873 if (i == length-1)
2874 {
2875 x = XEXP (x, i);
2876 goto repeat;
2877 }
709947e6 2878 mark_used_flags (XEXP (x, i), flag);
15bbde2b 2879 break;
2880
2881 case 'E':
2882 for (j = 0; j < XVECLEN (x, i); j++)
709947e6 2883 mark_used_flags (XVECEXP (x, i, j), flag);
15bbde2b 2884 break;
2885 }
2886 }
2887}
1cd4cfea 2888
709947e6 2889/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1cd4cfea 2890 to look for shared sub-parts. */
2891
2892void
709947e6 2893reset_used_flags (rtx x)
1cd4cfea 2894{
709947e6 2895 mark_used_flags (x, 0);
2896}
1cd4cfea 2897
709947e6 2898/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2899 to look for shared sub-parts. */
1cd4cfea 2900
709947e6 2901void
2902set_used_flags (rtx x)
2903{
2904 mark_used_flags (x, 1);
1cd4cfea 2905}
15bbde2b 2906\f
2907/* Copy X if necessary so that it won't be altered by changes in OTHER.
2908 Return X or the rtx for the pseudo reg the value of X was copied into.
2909 OTHER must be valid as a SET_DEST. */
2910
2911rtx
35cb5232 2912make_safe_from (rtx x, rtx other)
15bbde2b 2913{
2914 while (1)
2915 switch (GET_CODE (other))
2916 {
2917 case SUBREG:
2918 other = SUBREG_REG (other);
2919 break;
2920 case STRICT_LOW_PART:
2921 case SIGN_EXTEND:
2922 case ZERO_EXTEND:
2923 other = XEXP (other, 0);
2924 break;
2925 default:
2926 goto done;
2927 }
2928 done:
e16ceb8e 2929 if ((MEM_P (other)
15bbde2b 2930 && ! CONSTANT_P (x)
8ad4c111 2931 && !REG_P (x)
15bbde2b 2932 && GET_CODE (x) != SUBREG)
8ad4c111 2933 || (REG_P (other)
15bbde2b 2934 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2935 || reg_mentioned_p (other, x))))
2936 {
2937 rtx temp = gen_reg_rtx (GET_MODE (x));
2938 emit_move_insn (temp, x);
2939 return temp;
2940 }
2941 return x;
2942}
2943\f
2944/* Emission of insns (adding them to the doubly-linked list). */
2945
15bbde2b 2946/* Return the last insn emitted, even if it is in a sequence now pushed. */
2947
2948rtx
35cb5232 2949get_last_insn_anywhere (void)
15bbde2b 2950{
2951 struct sequence_stack *stack;
06f9d6ef 2952 if (get_last_insn ())
2953 return get_last_insn ();
0a893c29 2954 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 2955 if (stack->last != 0)
2956 return stack->last;
2957 return 0;
2958}
2959
70545de4 2960/* Return the first nonnote insn emitted in current sequence or current
2961 function. This routine looks inside SEQUENCEs. */
2962
2963rtx
35cb5232 2964get_first_nonnote_insn (void)
70545de4 2965{
06f9d6ef 2966 rtx insn = get_insns ();
f86e856e 2967
2968 if (insn)
2969 {
2970 if (NOTE_P (insn))
2971 for (insn = next_insn (insn);
2972 insn && NOTE_P (insn);
2973 insn = next_insn (insn))
2974 continue;
2975 else
2976 {
1c14a50e 2977 if (NONJUMP_INSN_P (insn)
f86e856e 2978 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2979 insn = XVECEXP (PATTERN (insn), 0, 0);
2980 }
2981 }
70545de4 2982
2983 return insn;
2984}
2985
2986/* Return the last nonnote insn emitted in current sequence or current
2987 function. This routine looks inside SEQUENCEs. */
2988
2989rtx
35cb5232 2990get_last_nonnote_insn (void)
70545de4 2991{
06f9d6ef 2992 rtx insn = get_last_insn ();
f86e856e 2993
2994 if (insn)
2995 {
2996 if (NOTE_P (insn))
2997 for (insn = previous_insn (insn);
2998 insn && NOTE_P (insn);
2999 insn = previous_insn (insn))
3000 continue;
3001 else
3002 {
1c14a50e 3003 if (NONJUMP_INSN_P (insn)
f86e856e 3004 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3005 insn = XVECEXP (PATTERN (insn), 0,
3006 XVECLEN (PATTERN (insn), 0) - 1);
3007 }
3008 }
70545de4 3009
3010 return insn;
3011}
3012
9845d120 3013/* Return the number of actual (non-debug) insns emitted in this
3014 function. */
3015
3016int
3017get_max_insn_count (void)
3018{
3019 int n = cur_insn_uid;
3020
3021 /* The table size must be stable across -g, to avoid codegen
3022 differences due to debug insns, and not be affected by
3023 -fmin-insn-uid, to avoid excessive table size and to simplify
3024 debugging of -fcompare-debug failures. */
3025 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3026 n -= cur_debug_insn_uid;
3027 else
3028 n -= MIN_NONDEBUG_INSN_UID;
3029
3030 return n;
3031}
3032
15bbde2b 3033\f
3034/* Return the next insn. If it is a SEQUENCE, return the first insn
3035 of the sequence. */
3036
3037rtx
35cb5232 3038next_insn (rtx insn)
15bbde2b 3039{
ce4469fa 3040 if (insn)
3041 {
3042 insn = NEXT_INSN (insn);
3043 if (insn && NONJUMP_INSN_P (insn)
3044 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3045 insn = XVECEXP (PATTERN (insn), 0, 0);
3046 }
15bbde2b 3047
ce4469fa 3048 return insn;
15bbde2b 3049}
3050
3051/* Return the previous insn. If it is a SEQUENCE, return the last insn
3052 of the sequence. */
3053
3054rtx
35cb5232 3055previous_insn (rtx insn)
15bbde2b 3056{
ce4469fa 3057 if (insn)
3058 {
3059 insn = PREV_INSN (insn);
3060 if (insn && NONJUMP_INSN_P (insn)
3061 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3062 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3063 }
15bbde2b 3064
ce4469fa 3065 return insn;
15bbde2b 3066}
3067
3068/* Return the next insn after INSN that is not a NOTE. This routine does not
3069 look inside SEQUENCEs. */
3070
3071rtx
35cb5232 3072next_nonnote_insn (rtx insn)
15bbde2b 3073{
ce4469fa 3074 while (insn)
3075 {
3076 insn = NEXT_INSN (insn);
3077 if (insn == 0 || !NOTE_P (insn))
3078 break;
3079 }
15bbde2b 3080
ce4469fa 3081 return insn;
15bbde2b 3082}
3083
c4d13c5c 3084/* Return the next insn after INSN that is not a NOTE, but stop the
3085 search before we enter another basic block. This routine does not
3086 look inside SEQUENCEs. */
3087
3088rtx
3089next_nonnote_insn_bb (rtx insn)
3090{
3091 while (insn)
3092 {
3093 insn = NEXT_INSN (insn);
3094 if (insn == 0 || !NOTE_P (insn))
3095 break;
3096 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3097 return NULL_RTX;
3098 }
3099
3100 return insn;
3101}
3102
15bbde2b 3103/* Return the previous insn before INSN that is not a NOTE. This routine does
3104 not look inside SEQUENCEs. */
3105
3106rtx
35cb5232 3107prev_nonnote_insn (rtx insn)
15bbde2b 3108{
ce4469fa 3109 while (insn)
3110 {
3111 insn = PREV_INSN (insn);
3112 if (insn == 0 || !NOTE_P (insn))
3113 break;
3114 }
15bbde2b 3115
ce4469fa 3116 return insn;
15bbde2b 3117}
3118
bcc66782 3119/* Return the previous insn before INSN that is not a NOTE, but stop
3120 the search before we enter another basic block. This routine does
3121 not look inside SEQUENCEs. */
3122
3123rtx
3124prev_nonnote_insn_bb (rtx insn)
3125{
3126 while (insn)
3127 {
3128 insn = PREV_INSN (insn);
3129 if (insn == 0 || !NOTE_P (insn))
3130 break;
3131 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3132 return NULL_RTX;
3133 }
3134
3135 return insn;
3136}
3137
9845d120 3138/* Return the next insn after INSN that is not a DEBUG_INSN. This
3139 routine does not look inside SEQUENCEs. */
3140
3141rtx
3142next_nondebug_insn (rtx insn)
3143{
3144 while (insn)
3145 {
3146 insn = NEXT_INSN (insn);
3147 if (insn == 0 || !DEBUG_INSN_P (insn))
3148 break;
3149 }
3150
3151 return insn;
3152}
3153
3154/* Return the previous insn before INSN that is not a DEBUG_INSN.
3155 This routine does not look inside SEQUENCEs. */
3156
3157rtx
3158prev_nondebug_insn (rtx insn)
3159{
3160 while (insn)
3161 {
3162 insn = PREV_INSN (insn);
3163 if (insn == 0 || !DEBUG_INSN_P (insn))
3164 break;
3165 }
3166
3167 return insn;
3168}
3169
5b8537a8 3170/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3171 This routine does not look inside SEQUENCEs. */
3172
3173rtx
3174next_nonnote_nondebug_insn (rtx insn)
3175{
3176 while (insn)
3177 {
3178 insn = NEXT_INSN (insn);
3179 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3180 break;
3181 }
3182
3183 return insn;
3184}
3185
3186/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3187 This routine does not look inside SEQUENCEs. */
3188
3189rtx
3190prev_nonnote_nondebug_insn (rtx insn)
3191{
3192 while (insn)
3193 {
3194 insn = PREV_INSN (insn);
3195 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3196 break;
3197 }
3198
3199 return insn;
3200}
3201
15bbde2b 3202/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3203 or 0, if there is none. This routine does not look inside
a92771b8 3204 SEQUENCEs. */
15bbde2b 3205
3206rtx
35cb5232 3207next_real_insn (rtx insn)
15bbde2b 3208{
ce4469fa 3209 while (insn)
3210 {
3211 insn = NEXT_INSN (insn);
3212 if (insn == 0 || INSN_P (insn))
3213 break;
3214 }
15bbde2b 3215
ce4469fa 3216 return insn;
15bbde2b 3217}
3218
3219/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3220 or 0, if there is none. This routine does not look inside
3221 SEQUENCEs. */
3222
3223rtx
35cb5232 3224prev_real_insn (rtx insn)
15bbde2b 3225{
ce4469fa 3226 while (insn)
3227 {
3228 insn = PREV_INSN (insn);
3229 if (insn == 0 || INSN_P (insn))
3230 break;
3231 }
15bbde2b 3232
ce4469fa 3233 return insn;
15bbde2b 3234}
3235
d5f9786f 3236/* Return the last CALL_INSN in the current list, or 0 if there is none.
3237 This routine does not look inside SEQUENCEs. */
3238
3239rtx
35cb5232 3240last_call_insn (void)
d5f9786f 3241{
3242 rtx insn;
3243
3244 for (insn = get_last_insn ();
6d7dc5b9 3245 insn && !CALL_P (insn);
d5f9786f 3246 insn = PREV_INSN (insn))
3247 ;
3248
3249 return insn;
3250}
3251
15bbde2b 3252/* Find the next insn after INSN that really does something. This routine
084950ee 3253 does not look inside SEQUENCEs. After reload this also skips over
3254 standalone USE and CLOBBER insns. */
15bbde2b 3255
2215ca0d 3256int
52d07779 3257active_insn_p (const_rtx insn)
2215ca0d 3258{
6d7dc5b9 3259 return (CALL_P (insn) || JUMP_P (insn)
3260 || (NONJUMP_INSN_P (insn)
3a66feab 3261 && (! reload_completed
3262 || (GET_CODE (PATTERN (insn)) != USE
3263 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3264}
3265
15bbde2b 3266rtx
35cb5232 3267next_active_insn (rtx insn)
15bbde2b 3268{
ce4469fa 3269 while (insn)
3270 {
3271 insn = NEXT_INSN (insn);
3272 if (insn == 0 || active_insn_p (insn))
3273 break;
3274 }
15bbde2b 3275
ce4469fa 3276 return insn;
15bbde2b 3277}
3278
3279/* Find the last insn before INSN that really does something. This routine
084950ee 3280 does not look inside SEQUENCEs. After reload this also skips over
3281 standalone USE and CLOBBER insns. */
15bbde2b 3282
3283rtx
35cb5232 3284prev_active_insn (rtx insn)
15bbde2b 3285{
ce4469fa 3286 while (insn)
3287 {
3288 insn = PREV_INSN (insn);
3289 if (insn == 0 || active_insn_p (insn))
3290 break;
3291 }
15bbde2b 3292
ce4469fa 3293 return insn;
15bbde2b 3294}
3295
3296/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3297
3298rtx
35cb5232 3299next_label (rtx insn)
15bbde2b 3300{
ce4469fa 3301 while (insn)
3302 {
3303 insn = NEXT_INSN (insn);
3304 if (insn == 0 || LABEL_P (insn))
3305 break;
3306 }
15bbde2b 3307
ce4469fa 3308 return insn;
15bbde2b 3309}
3310
4115ac36 3311/* Return the last label to mark the same position as LABEL. Return LABEL
3312 itself if it is null or any return rtx. */
67c5e2a9 3313
3314rtx
3315skip_consecutive_labels (rtx label)
3316{
3317 rtx insn;
3318
4115ac36 3319 if (label && ANY_RETURN_P (label))
3320 return label;
3321
67c5e2a9 3322 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3323 if (LABEL_P (insn))
3324 label = insn;
3325
3326 return label;
3327}
15bbde2b 3328\f
3329#ifdef HAVE_cc0
b15e0bba 3330/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3331 and REG_CC_USER notes so we can find it. */
3332
3333void
35cb5232 3334link_cc0_insns (rtx insn)
b15e0bba 3335{
3336 rtx user = next_nonnote_insn (insn);
3337
6d7dc5b9 3338 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
b15e0bba 3339 user = XVECEXP (PATTERN (user), 0, 0);
3340
a1ddb869 3341 add_reg_note (user, REG_CC_SETTER, insn);
3342 add_reg_note (insn, REG_CC_USER, user);
b15e0bba 3343}
3344
15bbde2b 3345/* Return the next insn that uses CC0 after INSN, which is assumed to
3346 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3347 applied to the result of this function should yield INSN).
3348
3349 Normally, this is simply the next insn. However, if a REG_CC_USER note
3350 is present, it contains the insn that uses CC0.
3351
3352 Return 0 if we can't find the insn. */
3353
3354rtx
35cb5232 3355next_cc0_user (rtx insn)
15bbde2b 3356{
b572011e 3357 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3358
3359 if (note)
3360 return XEXP (note, 0);
3361
3362 insn = next_nonnote_insn (insn);
6d7dc5b9 3363 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
15bbde2b 3364 insn = XVECEXP (PATTERN (insn), 0, 0);
3365
9204e736 3366 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3367 return insn;
3368
3369 return 0;
3370}
3371
3372/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3373 note, it is the previous insn. */
3374
3375rtx
35cb5232 3376prev_cc0_setter (rtx insn)
15bbde2b 3377{
b572011e 3378 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3379
3380 if (note)
3381 return XEXP (note, 0);
3382
3383 insn = prev_nonnote_insn (insn);
611234b4 3384 gcc_assert (sets_cc0_p (PATTERN (insn)));
15bbde2b 3385
3386 return insn;
3387}
3388#endif
344dc2fa 3389
698ff1f0 3390#ifdef AUTO_INC_DEC
3391/* Find a RTX_AUTOINC class rtx which matches DATA. */
3392
3393static int
3394find_auto_inc (rtx *xp, void *data)
3395{
3396 rtx x = *xp;
225ab426 3397 rtx reg = (rtx) data;
698ff1f0 3398
3399 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3400 return 0;
3401
3402 switch (GET_CODE (x))
3403 {
3404 case PRE_DEC:
3405 case PRE_INC:
3406 case POST_DEC:
3407 case POST_INC:
3408 case PRE_MODIFY:
3409 case POST_MODIFY:
3410 if (rtx_equal_p (reg, XEXP (x, 0)))
3411 return 1;
3412 break;
3413
3414 default:
3415 gcc_unreachable ();
3416 }
3417 return -1;
3418}
3419#endif
3420
344dc2fa 3421/* Increment the label uses for all labels present in X. */
3422
3423static void
35cb5232 3424mark_label_nuses (rtx x)
344dc2fa 3425{
19cb6b50 3426 enum rtx_code code;
3427 int i, j;
3428 const char *fmt;
344dc2fa 3429
3430 code = GET_CODE (x);
a030d4a8 3431 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
344dc2fa 3432 LABEL_NUSES (XEXP (x, 0))++;
3433
3434 fmt = GET_RTX_FORMAT (code);
3435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3436 {
3437 if (fmt[i] == 'e')
ff385626 3438 mark_label_nuses (XEXP (x, i));
344dc2fa 3439 else if (fmt[i] == 'E')
ff385626 3440 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3441 mark_label_nuses (XVECEXP (x, i, j));
3442 }
3443}
3444
15bbde2b 3445\f
3446/* Try splitting insns that can be split for better scheduling.
3447 PAT is the pattern which might split.
3448 TRIAL is the insn providing PAT.
6ef828f9 3449 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3450
3451 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3452 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3453 returns TRIAL. If the insn to be returned can be split, it will be. */
3454
3455rtx
35cb5232 3456try_split (rtx pat, rtx trial, int last)
15bbde2b 3457{
3458 rtx before = PREV_INSN (trial);
3459 rtx after = NEXT_INSN (trial);
15bbde2b 3460 int has_barrier = 0;
1e5b92fa 3461 rtx note, seq, tem;
3cd757b1 3462 int probability;
e13693ec 3463 rtx insn_last, insn;
3464 int njumps = 0;
3cd757b1 3465
25e880b1 3466 /* We're not good at redistributing frame information. */
3467 if (RTX_FRAME_RELATED_P (trial))
3468 return trial;
3469
3cd757b1 3470 if (any_condjump_p (trial)
3471 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3472 split_branch_probability = INTVAL (XEXP (note, 0));
3473 probability = split_branch_probability;
3474
3475 seq = split_insns (pat, trial);
3476
3477 split_branch_probability = -1;
15bbde2b 3478
3479 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3480 We may need to handle this specially. */
6d7dc5b9 3481 if (after && BARRIER_P (after))
15bbde2b 3482 {
3483 has_barrier = 1;
3484 after = NEXT_INSN (after);
3485 }
3486
e13693ec 3487 if (!seq)
3488 return trial;
3489
3490 /* Avoid infinite loop if any insn of the result matches
3491 the original pattern. */
3492 insn_last = seq;
3493 while (1)
15bbde2b 3494 {
e13693ec 3495 if (INSN_P (insn_last)
3496 && rtx_equal_p (PATTERN (insn_last), pat))
3497 return trial;
3498 if (!NEXT_INSN (insn_last))
3499 break;
3500 insn_last = NEXT_INSN (insn_last);
3501 }
d823ba47 3502
3072d30e 3503 /* We will be adding the new sequence to the function. The splitters
3504 may have introduced invalid RTL sharing, so unshare the sequence now. */
3505 unshare_all_rtl_in_chain (seq);
3506
e13693ec 3507 /* Mark labels. */
3508 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3509 {
6d7dc5b9 3510 if (JUMP_P (insn))
e13693ec 3511 {
3512 mark_jump_label (PATTERN (insn), insn, 0);
3513 njumps++;
3514 if (probability != -1
3515 && any_condjump_p (insn)
3516 && !find_reg_note (insn, REG_BR_PROB, 0))
31d3e01c 3517 {
e13693ec 3518 /* We can preserve the REG_BR_PROB notes only if exactly
 3519 one jump is created; otherwise the machine description
 3520 is responsible for this step using the
 3521 split_branch_probability variable. */
611234b4 3522 gcc_assert (njumps == 1);
a1ddb869 3523 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
31d3e01c 3524 }
e13693ec 3525 }
3526 }
3527
3528 /* If we are splitting a CALL_INSN, look for the CALL_INSN
b0bd0491 3529 in SEQ and copy any additional information across. */
6d7dc5b9 3530 if (CALL_P (trial))
e13693ec 3531 {
3532 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
6d7dc5b9 3533 if (CALL_P (insn))
e13693ec 3534 {
b0bd0491 3535 rtx next, *p;
3536
3537 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3538 target may have explicitly specified. */
3539 p = &CALL_INSN_FUNCTION_USAGE (insn);
0bb5a6cd 3540 while (*p)
3541 p = &XEXP (*p, 1);
3542 *p = CALL_INSN_FUNCTION_USAGE (trial);
b0bd0491 3543
3544 /* If the old call was a sibling call, the new one must
3545 be too. */
e13693ec 3546 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
b0bd0491 3547
3548 /* If the new call is the last instruction in the sequence,
3549 it will effectively replace the old call in-situ. Otherwise
3550 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3551 so that it comes immediately after the new call. */
3552 if (NEXT_INSN (insn))
47e1410d 3553 for (next = NEXT_INSN (trial);
3554 next && NOTE_P (next);
3555 next = NEXT_INSN (next))
3556 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
b0bd0491 3557 {
3558 remove_insn (next);
3559 add_insn_after (next, insn, NULL);
47e1410d 3560 break;
b0bd0491 3561 }
e13693ec 3562 }
3563 }
5262c253 3564
e13693ec 3565 /* Copy notes, particularly those related to the CFG. */
3566 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3567 {
3568 switch (REG_NOTE_KIND (note))
3569 {
3570 case REG_EH_REGION:
e38def9c 3571 copy_reg_eh_region_note_backward (note, insn_last, NULL);
e13693ec 3572 break;
381eb1e7 3573
e13693ec 3574 case REG_NORETURN:
3575 case REG_SETJMP:
4c0315d0 3576 case REG_TM:
698ff1f0 3577 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
381eb1e7 3578 {
6d7dc5b9 3579 if (CALL_P (insn))
a1ddb869 3580 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
381eb1e7 3581 }
e13693ec 3582 break;
5bb27a4b 3583
e13693ec 3584 case REG_NON_LOCAL_GOTO:
698ff1f0 3585 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
31d3e01c 3586 {
6d7dc5b9 3587 if (JUMP_P (insn))
a1ddb869 3588 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
31d3e01c 3589 }
e13693ec 3590 break;
344dc2fa 3591
698ff1f0 3592#ifdef AUTO_INC_DEC
3593 case REG_INC:
3594 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3595 {
3596 rtx reg = XEXP (note, 0);
3597 if (!FIND_REG_INC_NOTE (insn, reg)
3598 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
a1ddb869 3599 add_reg_note (insn, REG_INC, reg);
698ff1f0 3600 }
3601 break;
3602#endif
3603
dfe00a8f 3604 case REG_ARGS_SIZE:
3605 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3606 break;
3607
e13693ec 3608 default:
3609 break;
15bbde2b 3610 }
e13693ec 3611 }
3612
 3613 /* If there are LABELs inside the split insns, increment the
 3614 usage count so we don't delete the labels. */
19d2fe05 3615 if (INSN_P (trial))
e13693ec 3616 {
3617 insn = insn_last;
3618 while (insn != NULL_RTX)
15bbde2b 3619 {
19d2fe05 3620 /* JUMP_P insns have already been "marked" above. */
6d7dc5b9 3621 if (NONJUMP_INSN_P (insn))
e13693ec 3622 mark_label_nuses (PATTERN (insn));
15bbde2b 3623
e13693ec 3624 insn = PREV_INSN (insn);
3625 }
15bbde2b 3626 }
3627
13751393 3628 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
e13693ec 3629
3630 delete_insn (trial);
3631 if (has_barrier)
3632 emit_barrier_after (tem);
3633
3634 /* Recursively call try_split for each new insn created; by the
3635 time control returns here that insn will be fully split, so
3636 set LAST and continue from the insn after the one returned.
3637 We can't use next_active_insn here since AFTER may be a note.
 3638 Ignore deleted insns, which can occur if not optimizing. */
3639 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3640 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3641 tem = try_split (PATTERN (tem), tem, 1);
3642
3643 /* Return either the first or the last insn, depending on which was
3644 requested. */
3645 return last
06f9d6ef 3646 ? (after ? PREV_INSN (after) : get_last_insn ())
e13693ec 3647 : NEXT_INSN (before);
15bbde2b 3648}
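
/* A typical use, sketched (INSN stands for whatever insn the caller
   is trying to split; the surrounding pass is hypothetical):

     rtx last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       ... INSN was split; LAST is the last insn of the replacement,
           and try_split has already split the new insns recursively ...
*/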
3649\f
3650/* Make and return an INSN rtx, initializing all its slots.
 6a84e367 3651 Store PATTERN in the pattern slot. */
15bbde2b 3652
3653rtx
35cb5232 3654make_insn_raw (rtx pattern)
15bbde2b 3655{
19cb6b50 3656 rtx insn;
15bbde2b 3657
d7c47c0e 3658 insn = rtx_alloc (INSN);
15bbde2b 3659
575333f9 3660 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3661 PATTERN (insn) = pattern;
3662 INSN_CODE (insn) = -1;
fc92fa61 3663 REG_NOTES (insn) = NULL;
375c1c8a 3664 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3665 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3666
fe7f701d 3667#ifdef ENABLE_RTL_CHECKING
3668 if (insn
9204e736 3669 && INSN_P (insn)
fe7f701d 3670 && (returnjump_p (insn)
3671 || (GET_CODE (insn) == SET
3672 && SET_DEST (insn) == pc_rtx)))
3673 {
c3ceba8e 3674 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
fe7f701d 3675 debug_rtx (insn);
3676 }
3677#endif
d823ba47 3678
15bbde2b 3679 return insn;
3680}
3681
9845d120 3682/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3683
e4fdf07d 3684static rtx
9845d120 3685make_debug_insn_raw (rtx pattern)
3686{
3687 rtx insn;
3688
3689 insn = rtx_alloc (DEBUG_INSN);
3690 INSN_UID (insn) = cur_debug_insn_uid++;
3691 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3692 INSN_UID (insn) = cur_insn_uid++;
3693
3694 PATTERN (insn) = pattern;
3695 INSN_CODE (insn) = -1;
3696 REG_NOTES (insn) = NULL;
3697 INSN_LOCATOR (insn) = curr_insn_locator ();
3698 BLOCK_FOR_INSN (insn) = NULL;
3699
3700 return insn;
3701}
3702
31d3e01c 3703/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3704
e4fdf07d 3705static rtx
35cb5232 3706make_jump_insn_raw (rtx pattern)
15bbde2b 3707{
19cb6b50 3708 rtx insn;
15bbde2b 3709
6a84e367 3710 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3711 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3712
3713 PATTERN (insn) = pattern;
3714 INSN_CODE (insn) = -1;
fc92fa61 3715 REG_NOTES (insn) = NULL;
3716 JUMP_LABEL (insn) = NULL;
375c1c8a 3717 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3718 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3719
3720 return insn;
3721}
6e911104 3722
31d3e01c 3723/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3724
3725static rtx
35cb5232 3726make_call_insn_raw (rtx pattern)
6e911104 3727{
19cb6b50 3728 rtx insn;
6e911104 3729
3730 insn = rtx_alloc (CALL_INSN);
3731 INSN_UID (insn) = cur_insn_uid++;
3732
3733 PATTERN (insn) = pattern;
3734 INSN_CODE (insn) = -1;
6e911104 3735 REG_NOTES (insn) = NULL;
3736 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
375c1c8a 3737 INSN_LOCATOR (insn) = curr_insn_locator ();
ab87d1bc 3738 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3739
3740 return insn;
3741}
15bbde2b 3742\f
3743/* Add INSN to the end of the doubly-linked list.
3744 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3745
3746void
35cb5232 3747add_insn (rtx insn)
15bbde2b 3748{
 06f9d6ef 3749 PREV_INSN (insn) = get_last_insn ();
15bbde2b 3750 NEXT_INSN (insn) = 0;
3751
 06f9d6ef 3752 if (NULL != get_last_insn ())
3753 NEXT_INSN (get_last_insn ()) = insn;
15bbde2b 3754
06f9d6ef 3755 if (NULL == get_insns ())
3756 set_first_insn (insn);
15bbde2b 3757
06f9d6ef 3758 set_last_insn (insn);
15bbde2b 3759}
3760
312de84d 3761/* Add INSN into the doubly-linked list after insn AFTER. This and
3762 the next should be the only functions called to insert an insn once
 f65c10c0 3763 delay slots have been filled, since only they know how to update a
312de84d 3764 SEQUENCE. */
15bbde2b 3765
3766void
3072d30e 3767add_insn_after (rtx insn, rtx after, basic_block bb)
15bbde2b 3768{
3769 rtx next = NEXT_INSN (after);
3770
611234b4 3771 gcc_assert (!optimize || !INSN_DELETED_P (after));
f65c10c0 3772
15bbde2b 3773 NEXT_INSN (insn) = next;
3774 PREV_INSN (insn) = after;
3775
3776 if (next)
3777 {
3778 PREV_INSN (next) = insn;
6d7dc5b9 3779 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
15bbde2b 3780 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3781 }
06f9d6ef 3782 else if (get_last_insn () == after)
3783 set_last_insn (insn);
15bbde2b 3784 else
3785 {
0a893c29 3786 struct sequence_stack *stack = seq_stack;
15bbde2b 3787 /* Scan all pending sequences too. */
3788 for (; stack; stack = stack->next)
3789 if (after == stack->last)
398f4855 3790 {
3791 stack->last = insn;
3792 break;
3793 }
312de84d 3794
611234b4 3795 gcc_assert (stack);
15bbde2b 3796 }
3797
6d7dc5b9 3798 if (!BARRIER_P (after)
3799 && !BARRIER_P (insn)
9dda7915 3800 && (bb = BLOCK_FOR_INSN (after)))
3801 {
3802 set_block_for_insn (insn, bb);
308f9b79 3803 if (INSN_P (insn))
3072d30e 3804 df_insn_rescan (insn);
 9dda7915 3805 /* Should not happen, as the first insn in the BB is always
 3fb1e43b 3806 either a NOTE or a LABEL. */
5496dbfc 3807 if (BB_END (bb) == after
9dda7915 3808 /* Avoid clobbering of structure when creating new BB. */
6d7dc5b9 3809 && !BARRIER_P (insn)
ad4583d9 3810 && !NOTE_INSN_BASIC_BLOCK_P (insn))
5496dbfc 3811 BB_END (bb) = insn;
9dda7915 3812 }
3813
15bbde2b 3814 NEXT_INSN (after) = insn;
6d7dc5b9 3815 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
15bbde2b 3816 {
3817 rtx sequence = PATTERN (after);
3818 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3819 }
3820}
3821
312de84d 3822/* Add INSN into the doubly-linked list before insn BEFORE. This and
3072d30e 3823 the previous should be the only functions called to insert an insn
 3824 once delay slots have been filled, since only they know how to
 3825 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
 3826 bb from BEFORE. */
312de84d 3827
3828void
3072d30e 3829add_insn_before (rtx insn, rtx before, basic_block bb)
312de84d 3830{
3831 rtx prev = PREV_INSN (before);
3832
611234b4 3833 gcc_assert (!optimize || !INSN_DELETED_P (before));
f65c10c0 3834
312de84d 3835 PREV_INSN (insn) = prev;
3836 NEXT_INSN (insn) = before;
3837
3838 if (prev)
3839 {
3840 NEXT_INSN (prev) = insn;
6d7dc5b9 3841 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
312de84d 3842 {
3843 rtx sequence = PATTERN (prev);
3844 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3845 }
3846 }
06f9d6ef 3847 else if (get_insns () == before)
3848 set_first_insn (insn);
312de84d 3849 else
3850 {
0a893c29 3851 struct sequence_stack *stack = seq_stack;
312de84d 3852 /* Scan all pending sequences too. */
3853 for (; stack; stack = stack->next)
3854 if (before == stack->first)
398f4855 3855 {
3856 stack->first = insn;
3857 break;
3858 }
312de84d 3859
611234b4 3860 gcc_assert (stack);
312de84d 3861 }
3862
48e1416a 3863 if (!bb
3072d30e 3864 && !BARRIER_P (before)
3865 && !BARRIER_P (insn))
3866 bb = BLOCK_FOR_INSN (before);
3867
3868 if (bb)
9dda7915 3869 {
3870 set_block_for_insn (insn, bb);
308f9b79 3871 if (INSN_P (insn))
3072d30e 3872 df_insn_rescan (insn);
 611234b4 3873 /* Should not happen, as the first insn in the BB is always either a
 ba821eb1 3874 NOTE or a LABEL. */
611234b4 3875 gcc_assert (BB_HEAD (bb) != insn
3876 /* Avoid clobbering of structure when creating new BB. */
3877 || BARRIER_P (insn)
ad4583d9 3878 || NOTE_INSN_BASIC_BLOCK_P (insn));
9dda7915 3879 }
3880
312de84d 3881 PREV_INSN (before) = insn;
6d7dc5b9 3882 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
312de84d 3883 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3884}
3885
3072d30e 3886
 3887/* Replace INSN with a deleted instruction note. */
3888
fc3d1695 3889void
3890set_insn_deleted (rtx insn)
3072d30e 3891{
3892 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3893 PUT_CODE (insn, NOTE);
3894 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3895}
3896
3897
7ddcf2bf 3898/* Remove an insn from its doubly-linked list. This function knows how
3899 to handle sequences. */
3900void
35cb5232 3901remove_insn (rtx insn)
7ddcf2bf 3902{
3903 rtx next = NEXT_INSN (insn);
3904 rtx prev = PREV_INSN (insn);
e4bf866d 3905 basic_block bb;
3906
3072d30e 3907 /* Later in the code, the block will be marked dirty. */
3908 df_insn_delete (NULL, INSN_UID (insn));
3909
7ddcf2bf 3910 if (prev)
3911 {
3912 NEXT_INSN (prev) = next;
6d7dc5b9 3913 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
7ddcf2bf 3914 {
3915 rtx sequence = PATTERN (prev);
3916 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3917 }
3918 }
06f9d6ef 3919 else if (get_insns () == insn)
3920 {
c8f0c143 3921 if (next)
3922 PREV_INSN (next) = NULL;
06f9d6ef 3923 set_first_insn (next);
3924 }
7ddcf2bf 3925 else
3926 {
0a893c29 3927 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3928 /* Scan all pending sequences too. */
3929 for (; stack; stack = stack->next)
3930 if (insn == stack->first)
3931 {
3932 stack->first = next;
3933 break;
3934 }
3935
611234b4 3936 gcc_assert (stack);
7ddcf2bf 3937 }
3938
3939 if (next)
3940 {
3941 PREV_INSN (next) = prev;
6d7dc5b9 3942 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
7ddcf2bf 3943 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3944 }
06f9d6ef 3945 else if (get_last_insn () == insn)
3946 set_last_insn (prev);
7ddcf2bf 3947 else
3948 {
0a893c29 3949 struct sequence_stack *stack = seq_stack;
7ddcf2bf 3950 /* Scan all pending sequences too. */
3951 for (; stack; stack = stack->next)
3952 if (insn == stack->last)
3953 {
3954 stack->last = prev;
3955 break;
3956 }
3957
611234b4 3958 gcc_assert (stack);
7ddcf2bf 3959 }
6d7dc5b9 3960 if (!BARRIER_P (insn)
e4bf866d 3961 && (bb = BLOCK_FOR_INSN (insn)))
3962 {
137b701d 3963 if (NONDEBUG_INSN_P (insn))
3072d30e 3964 df_set_bb_dirty (bb);
5496dbfc 3965 if (BB_HEAD (bb) == insn)
e4bf866d 3966 {
f4aee538 3967 /* Never ever delete the basic block note without deleting whole
3968 basic block. */
611234b4 3969 gcc_assert (!NOTE_P (insn));
5496dbfc 3970 BB_HEAD (bb) = next;
e4bf866d 3971 }
5496dbfc 3972 if (BB_END (bb) == insn)
3973 BB_END (bb) = prev;
e4bf866d 3974 }
7ddcf2bf 3975}
3976
d5f9786f 3977/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3978
3979void
35cb5232 3980add_function_usage_to (rtx call_insn, rtx call_fusage)
d5f9786f 3981{
611234b4 3982 gcc_assert (call_insn && CALL_P (call_insn));
d5f9786f 3983
3984 /* Put the register usage information on the CALL. If there is already
3985 some usage information, put ours at the end. */
3986 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3987 {
3988 rtx link;
3989
3990 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3991 link = XEXP (link, 1))
3992 ;
3993
3994 XEXP (link, 1) = call_fusage;
3995 }
3996 else
3997 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3998}
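
/* For example, to record that a call uses one extra register, a caller
   might append a one-element fusage chain (a sketch; the choice of
   STATIC_CHAIN_REGNUM is illustrative, not required):

     rtx u = gen_rtx_USE (VOIDmode,
                          gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));
     add_function_usage_to (call_insn,
                            gen_rtx_EXPR_LIST (VOIDmode, u, NULL_RTX));
*/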
3999
15bbde2b 4000/* Delete all insns made since FROM.
4001 FROM becomes the new last instruction. */
4002
4003void
35cb5232 4004delete_insns_since (rtx from)
15bbde2b 4005{
4006 if (from == 0)
06f9d6ef 4007 set_first_insn (0);
15bbde2b 4008 else
4009 NEXT_INSN (from) = 0;
06f9d6ef 4010 set_last_insn (from);
15bbde2b 4011}
4012
 34e2ddcd 4013/* This function is deprecated; please use sequences instead.
4014
4015 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 4016 The insns to be moved are those between FROM and TO.
4017 They are moved to a new position after the insn AFTER.
4018 AFTER must not be FROM or TO or any insn in between.
4019
4020 This function does not know about SEQUENCEs and hence should not be
4021 called after delay-slot filling has been done. */
4022
4023void
35cb5232 4024reorder_insns_nobb (rtx from, rtx to, rtx after)
15bbde2b 4025{
7f6ca11f 4026#ifdef ENABLE_CHECKING
4027 rtx x;
4028 for (x = from; x != to; x = NEXT_INSN (x))
4029 gcc_assert (after != x);
4030 gcc_assert (after != to);
4031#endif
4032
15bbde2b 4033 /* Splice this bunch out of where it is now. */
4034 if (PREV_INSN (from))
4035 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4036 if (NEXT_INSN (to))
4037 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
06f9d6ef 4038 if (get_last_insn () == to)
4039 set_last_insn (PREV_INSN (from));
4040 if (get_insns () == from)
4041 set_first_insn (NEXT_INSN (to));
15bbde2b 4042
4043 /* Make the new neighbors point to it and it to them. */
4044 if (NEXT_INSN (after))
4045 PREV_INSN (NEXT_INSN (after)) = to;
4046
4047 NEXT_INSN (to) = NEXT_INSN (after);
4048 PREV_INSN (from) = after;
4049 NEXT_INSN (after) = from;
 06f9d6ef 4050 if (after == get_last_insn ())
4051 set_last_insn (to);
15bbde2b 4052}
4053
9dda7915 4054/* Same as function above, but take care to update BB boundaries. */
4055void
35cb5232 4056reorder_insns (rtx from, rtx to, rtx after)
9dda7915 4057{
4058 rtx prev = PREV_INSN (from);
4059 basic_block bb, bb2;
4060
4061 reorder_insns_nobb (from, to, after);
4062
6d7dc5b9 4063 if (!BARRIER_P (after)
9dda7915 4064 && (bb = BLOCK_FOR_INSN (after)))
4065 {
4066 rtx x;
3072d30e 4067 df_set_bb_dirty (bb);
d4c5e26d 4068
6d7dc5b9 4069 if (!BARRIER_P (from)
9dda7915 4070 && (bb2 = BLOCK_FOR_INSN (from)))
4071 {
5496dbfc 4072 if (BB_END (bb2) == to)
4073 BB_END (bb2) = prev;
3072d30e 4074 df_set_bb_dirty (bb2);
9dda7915 4075 }
4076
5496dbfc 4077 if (BB_END (bb) == after)
4078 BB_END (bb) = to;
9dda7915 4079
4080 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7097dd0c 4081 if (!BARRIER_P (x))
a2bdd643 4082 df_insn_change_bb (x, bb);
9dda7915 4083 }
4084}
4085
15bbde2b 4086\f
31d3e01c 4087/* Emit insn(s) of given code and pattern
4088 at a specified place within the doubly-linked list.
15bbde2b 4089
31d3e01c 4090 All of the emit_foo global entry points accept an object
4091 X which is either an insn list or a PATTERN of a single
4092 instruction.
15bbde2b 4093
31d3e01c 4094 There are thus a few canonical ways to generate code and
4095 emit it at a specific place in the instruction stream. For
4096 example, consider the instruction named SPOT and the fact that
4097 we would like to emit some instructions before SPOT. We might
4098 do it like this:
15bbde2b 4099
31d3e01c 4100 start_sequence ();
4101 ... emit the new instructions ...
4102 insns_head = get_insns ();
4103 end_sequence ();
15bbde2b 4104
31d3e01c 4105 emit_insn_before (insns_head, SPOT);
15bbde2b 4106
31d3e01c 4107 It used to be common to generate SEQUENCE rtl instead, but that
4108 is a relic of the past which no longer occurs. The reason is that
 4109 SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4110 generated would almost certainly die right after it was created. */
15bbde2b 4111
5f7c5ddd 4112static rtx
4113emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4114 rtx (*make_raw) (rtx))
15bbde2b 4115{
19cb6b50 4116 rtx insn;
15bbde2b 4117
611234b4 4118 gcc_assert (before);
31d3e01c 4119
4120 if (x == NULL_RTX)
4121 return last;
4122
4123 switch (GET_CODE (x))
15bbde2b 4124 {
9845d120 4125 case DEBUG_INSN:
31d3e01c 4126 case INSN:
4127 case JUMP_INSN:
4128 case CALL_INSN:
4129 case CODE_LABEL:
4130 case BARRIER:
4131 case NOTE:
4132 insn = x;
4133 while (insn)
4134 {
4135 rtx next = NEXT_INSN (insn);
3072d30e 4136 add_insn_before (insn, before, bb);
31d3e01c 4137 last = insn;
4138 insn = next;
4139 }
4140 break;
4141
4142#ifdef ENABLE_RTL_CHECKING
4143 case SEQUENCE:
611234b4 4144 gcc_unreachable ();
31d3e01c 4145 break;
4146#endif
4147
4148 default:
5f7c5ddd 4149 last = (*make_raw) (x);
3072d30e 4150 add_insn_before (last, before, bb);
31d3e01c 4151 break;
15bbde2b 4152 }
4153
31d3e01c 4154 return last;
15bbde2b 4155}
4156
5f7c5ddd 4157/* Make X be output before the instruction BEFORE. */
4158
4159rtx
4160emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4161{
4162 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4163}
4164
31d3e01c 4165/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4166 and output it before the instruction BEFORE. */
4167
4168rtx
0891f67c 4169emit_jump_insn_before_noloc (rtx x, rtx before)
15bbde2b 4170{
5f7c5ddd 4171 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4172 make_jump_insn_raw);
15bbde2b 4173}
4174
31d3e01c 4175/* Make an instruction with body X and code CALL_INSN
cd0fe062 4176 and output it before the instruction BEFORE. */
4177
4178rtx
0891f67c 4179emit_call_insn_before_noloc (rtx x, rtx before)
cd0fe062 4180{
5f7c5ddd 4181 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4182 make_call_insn_raw);
cd0fe062 4183}
4184
9845d120 4185/* Make an instruction with body X and code DEBUG_INSN
4186 and output it before the instruction BEFORE. */
4187
4188rtx
4189emit_debug_insn_before_noloc (rtx x, rtx before)
4190{
5f7c5ddd 4191 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4192 make_debug_insn_raw);
9845d120 4193}
4194
15bbde2b 4195/* Make an insn of code BARRIER
71caadc0 4196 and output it before the insn BEFORE. */
15bbde2b 4197
4198rtx
35cb5232 4199emit_barrier_before (rtx before)
15bbde2b 4200{
19cb6b50 4201 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4202
4203 INSN_UID (insn) = cur_insn_uid++;
4204
3072d30e 4205 add_insn_before (insn, before, NULL);
15bbde2b 4206 return insn;
4207}
4208
71caadc0 4209/* Emit the label LABEL before the insn BEFORE. */
4210
4211rtx
35cb5232 4212emit_label_before (rtx label, rtx before)
71caadc0 4213{
596ef494 4214 gcc_checking_assert (INSN_UID (label) == 0);
4215 INSN_UID (label) = cur_insn_uid++;
4216 add_insn_before (label, before, NULL);
71caadc0 4217 return label;
4218}
4219
15bbde2b 4220/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4221
4222rtx
ad4583d9 4223emit_note_before (enum insn_note subtype, rtx before)
15bbde2b 4224{
19cb6b50 4225 rtx note = rtx_alloc (NOTE);
15bbde2b 4226 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4227 NOTE_KIND (note) = subtype;
ab87d1bc 4228 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4229 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
15bbde2b 4230
3072d30e 4231 add_insn_before (note, before, NULL);
15bbde2b 4232 return note;
4233}
4234\f
31d3e01c 4235/* Helper for emit_insn_after, handles lists of instructions
4236 efficiently. */
15bbde2b 4237
31d3e01c 4238static rtx
3072d30e 4239emit_insn_after_1 (rtx first, rtx after, basic_block bb)
15bbde2b 4240{
31d3e01c 4241 rtx last;
4242 rtx after_after;
3072d30e 4243 if (!bb && !BARRIER_P (after))
4244 bb = BLOCK_FOR_INSN (after);
15bbde2b 4245
3072d30e 4246 if (bb)
15bbde2b 4247 {
3072d30e 4248 df_set_bb_dirty (bb);
31d3e01c 4249 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
6d7dc5b9 4250 if (!BARRIER_P (last))
3072d30e 4251 {
4252 set_block_for_insn (last, bb);
4253 df_insn_rescan (last);
4254 }
6d7dc5b9 4255 if (!BARRIER_P (last))
3072d30e 4256 {
4257 set_block_for_insn (last, bb);
4258 df_insn_rescan (last);
4259 }
5496dbfc 4260 if (BB_END (bb) == after)
4261 BB_END (bb) = last;
15bbde2b 4262 }
4263 else
31d3e01c 4264 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4265 continue;
4266
4267 after_after = NEXT_INSN (after);
4268
4269 NEXT_INSN (after) = first;
4270 PREV_INSN (first) = after;
4271 NEXT_INSN (last) = after_after;
4272 if (after_after)
4273 PREV_INSN (after_after) = last;
4274
06f9d6ef 4275 if (after == get_last_insn())
4276 set_last_insn (last);
e1ab7874 4277
31d3e01c 4278 return last;
4279}
4280
5f7c5ddd 4281static rtx
4282emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4283 rtx (*make_raw)(rtx))
31d3e01c 4284{
4285 rtx last = after;
4286
611234b4 4287 gcc_assert (after);
31d3e01c 4288
4289 if (x == NULL_RTX)
4290 return last;
4291
4292 switch (GET_CODE (x))
15bbde2b 4293 {
9845d120 4294 case DEBUG_INSN:
31d3e01c 4295 case INSN:
4296 case JUMP_INSN:
4297 case CALL_INSN:
4298 case CODE_LABEL:
4299 case BARRIER:
4300 case NOTE:
3072d30e 4301 last = emit_insn_after_1 (x, after, bb);
31d3e01c 4302 break;
4303
4304#ifdef ENABLE_RTL_CHECKING
4305 case SEQUENCE:
611234b4 4306 gcc_unreachable ();
31d3e01c 4307 break;
4308#endif
4309
4310 default:
5f7c5ddd 4311 last = (*make_raw) (x);
3072d30e 4312 add_insn_after (last, after, bb);
31d3e01c 4313 break;
15bbde2b 4314 }
4315
31d3e01c 4316 return last;
15bbde2b 4317}
4318
5f7c5ddd 4319/* Make X be output after the insn AFTER and set the BB of insn. If
4320 BB is NULL, an attempt is made to infer the BB from AFTER. */
4321
4322rtx
4323emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4324{
4325 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4326}
4327
1bea98fb 4328
31d3e01c 4329/* Make an insn of code JUMP_INSN with body X
15bbde2b 4330 and output it after the insn AFTER. */
4331
4332rtx
0891f67c 4333emit_jump_insn_after_noloc (rtx x, rtx after)
15bbde2b 4334{
5f7c5ddd 4335 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
31d3e01c 4336}
4337
4338/* Make an instruction with body X and code CALL_INSN
4339 and output it after the instruction AFTER. */
4340
4341rtx
0891f67c 4342emit_call_insn_after_noloc (rtx x, rtx after)
31d3e01c 4343{
5f7c5ddd 4344 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
15bbde2b 4345}
4346
 9845d120 4347/* Make an instruction with body X and code DEBUG_INSN
4348 and output it after the instruction AFTER. */
4349
4350rtx
4351emit_debug_insn_after_noloc (rtx x, rtx after)
4352{
5f7c5ddd 4353 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
9845d120 4354}
4355
15bbde2b 4356/* Make an insn of code BARRIER
4357 and output it after the insn AFTER. */
4358
4359rtx
35cb5232 4360emit_barrier_after (rtx after)
15bbde2b 4361{
19cb6b50 4362 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4363
4364 INSN_UID (insn) = cur_insn_uid++;
4365
3072d30e 4366 add_insn_after (insn, after, NULL);
15bbde2b 4367 return insn;
4368}
4369
4370/* Emit the label LABEL after the insn AFTER. */
4371
4372rtx
35cb5232 4373emit_label_after (rtx label, rtx after)
15bbde2b 4374{
596ef494 4375 gcc_checking_assert (INSN_UID (label) == 0);
4376 INSN_UID (label) = cur_insn_uid++;
4377 add_insn_after (label, after, NULL);
15bbde2b 4378 return label;
4379}
4380
4381/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4382
4383rtx
ad4583d9 4384emit_note_after (enum insn_note subtype, rtx after)
15bbde2b 4385{
19cb6b50 4386 rtx note = rtx_alloc (NOTE);
15bbde2b 4387 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4388 NOTE_KIND (note) = subtype;
ab87d1bc 4389 BLOCK_FOR_INSN (note) = NULL;
60ad3b0e 4390 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3072d30e 4391 add_insn_after (note, after, NULL);
15bbde2b 4392 return note;
4393}
15bbde2b 4394\f
 ede4ebcb 4395/* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4396 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4397
4398static rtx
4399emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4400 rtx (*make_raw) (rtx))
d321a68b 4401{
ede4ebcb 4402 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
d321a68b 4403
0891f67c 4404 if (pattern == NULL_RTX || !loc)
ca154f3f 4405 return last;
4406
31d3e01c 4407 after = NEXT_INSN (after);
4408 while (1)
4409 {
0891f67c 4410 if (active_insn_p (after) && !INSN_LOCATOR (after))
13751393 4411 INSN_LOCATOR (after) = loc;
31d3e01c 4412 if (after == last)
4413 break;
4414 after = NEXT_INSN (after);
4415 }
d321a68b 4416 return last;
4417}
4418
ede4ebcb 4419/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4420 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4421 any DEBUG_INSNs. */
4422
4423static rtx
4424emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4425 rtx (*make_raw) (rtx))
0891f67c 4426{
9845d120 4427 rtx prev = after;
4428
ede4ebcb 4429 if (skip_debug_insns)
4430 while (DEBUG_INSN_P (prev))
4431 prev = PREV_INSN (prev);
9845d120 4432
4433 if (INSN_P (prev))
ede4ebcb 4434 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4435 make_raw);
0891f67c 4436 else
ede4ebcb 4437 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0891f67c 4438}
4439
ede4ebcb 4440/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
d321a68b 4441rtx
ede4ebcb 4442emit_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4443{
ede4ebcb 4444 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4445}
31d3e01c 4446
ede4ebcb 4447/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4448rtx
4449emit_insn_after (rtx pattern, rtx after)
4450{
4451 return emit_pattern_after (pattern, after, true, make_insn_raw);
4452}
ca154f3f 4453
ede4ebcb 4454/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4455rtx
4456emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4457{
4458 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
d321a68b 4459}
4460
0891f67c 4461/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4462rtx
4463emit_jump_insn_after (rtx pattern, rtx after)
4464{
ede4ebcb 4465 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
0891f67c 4466}
4467
ede4ebcb 4468/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
d321a68b 4469rtx
35cb5232 4470emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
d321a68b 4471{
ede4ebcb 4472 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
d321a68b 4473}
4474
0891f67c 4475/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4476rtx
4477emit_call_insn_after (rtx pattern, rtx after)
4478{
ede4ebcb 4479 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
0891f67c 4480}
4481
ede4ebcb 4482/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
9845d120 4483rtx
4484emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4485{
ede4ebcb 4486 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
9845d120 4487}
4488
4489/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4490rtx
4491emit_debug_insn_after (rtx pattern, rtx after)
4492{
ede4ebcb 4493 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
9845d120 4494}
4495
 ede4ebcb 4496/* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4497 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4498 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4499 CALL_INSN, etc. */
4500
4501static rtx
4502emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4503 rtx (*make_raw) (rtx))
d321a68b 4504{
4505 rtx first = PREV_INSN (before);
ede4ebcb 4506 rtx last = emit_pattern_before_noloc (pattern, before,
4507 insnp ? before : NULL_RTX,
4508 NULL, make_raw);
0891f67c 4509
4510 if (pattern == NULL_RTX || !loc)
4511 return last;
4512
4486418e 4513 if (!first)
4514 first = get_insns ();
4515 else
4516 first = NEXT_INSN (first);
0891f67c 4517 while (1)
4518 {
4519 if (active_insn_p (first) && !INSN_LOCATOR (first))
4520 INSN_LOCATOR (first) = loc;
4521 if (first == last)
4522 break;
4523 first = NEXT_INSN (first);
4524 }
4525 return last;
4526}
4527
ede4ebcb 4528/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4529 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4530 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4531 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4532
4533static rtx
4534emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4535 bool insnp, rtx (*make_raw) (rtx))
0891f67c 4536{
9845d120 4537 rtx next = before;
4538
ede4ebcb 4539 if (skip_debug_insns)
4540 while (DEBUG_INSN_P (next))
4541 next = PREV_INSN (next);
9845d120 4542
4543 if (INSN_P (next))
ede4ebcb 4544 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4545 insnp, make_raw);
0891f67c 4546 else
ede4ebcb 4547 return emit_pattern_before_noloc (pattern, before,
4548 insnp ? before : NULL_RTX,
4549 NULL, make_raw);
0891f67c 4550}
4551
ede4ebcb 4552/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
0891f67c 4553rtx
ede4ebcb 4554emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0891f67c 4555{
ede4ebcb 4556 return emit_pattern_before_setloc (pattern, before, loc, true,
4557 make_insn_raw);
4558}
0891f67c 4559
ede4ebcb 4560/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4561rtx
4562emit_insn_before (rtx pattern, rtx before)
4563{
4564 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4565}
0891f67c 4566
 ede4ebcb 4567/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4568rtx
4569emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4570{
4571 return emit_pattern_before_setloc (pattern, before, loc, false,
4572 make_jump_insn_raw);
0891f67c 4573}
4574
4575/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4576rtx
4577emit_jump_insn_before (rtx pattern, rtx before)
4578{
ede4ebcb 4579 return emit_pattern_before (pattern, before, true, false,
4580 make_jump_insn_raw);
0891f67c 4581}
4582
 ede4ebcb 4583/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
0891f67c 4584rtx
4585emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4586{
ede4ebcb 4587 return emit_pattern_before_setloc (pattern, before, loc, false,
4588 make_call_insn_raw);
d321a68b 4589}
0891f67c 4590
ede4ebcb 4591/* Like emit_call_insn_before_noloc,
 4592 but set INSN_LOCATOR according to BEFORE. */
0891f67c 4593rtx
4594emit_call_insn_before (rtx pattern, rtx before)
4595{
ede4ebcb 4596 return emit_pattern_before (pattern, before, true, false,
4597 make_call_insn_raw);
0891f67c 4598}
9845d120 4599
 ede4ebcb 4600/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
9845d120 4601rtx
4602emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4603{
ede4ebcb 4604 return emit_pattern_before_setloc (pattern, before, loc, false,
4605 make_debug_insn_raw);
9845d120 4606}
4607
ede4ebcb 4608/* Like emit_debug_insn_before_noloc,
 4609 but set INSN_LOCATOR according to BEFORE. */
9845d120 4610rtx
4611emit_debug_insn_before (rtx pattern, rtx before)
4612{
ede4ebcb 4613 return emit_pattern_before (pattern, before, false, false,
4614 make_debug_insn_raw);
9845d120 4615}
d321a68b 4616\f
31d3e01c 4617/* Take X and emit it at the end of the doubly-linked
4618 INSN list.
15bbde2b 4619
4620 Returns the last insn emitted. */
4621
4622rtx
35cb5232 4623emit_insn (rtx x)
15bbde2b 4624{
 06f9d6ef 4625 rtx last = get_last_insn ();
31d3e01c 4626 rtx insn;
15bbde2b 4627
31d3e01c 4628 if (x == NULL_RTX)
4629 return last;
15bbde2b 4630
31d3e01c 4631 switch (GET_CODE (x))
4632 {
9845d120 4633 case DEBUG_INSN:
31d3e01c 4634 case INSN:
4635 case JUMP_INSN:
4636 case CALL_INSN:
4637 case CODE_LABEL:
4638 case BARRIER:
4639 case NOTE:
4640 insn = x;
4641 while (insn)
15bbde2b 4642 {
31d3e01c 4643 rtx next = NEXT_INSN (insn);
15bbde2b 4644 add_insn (insn);
31d3e01c 4645 last = insn;
4646 insn = next;
15bbde2b 4647 }
31d3e01c 4648 break;
15bbde2b 4649
31d3e01c 4650#ifdef ENABLE_RTL_CHECKING
4651 case SEQUENCE:
611234b4 4652 gcc_unreachable ();
31d3e01c 4653 break;
4654#endif
15bbde2b 4655
31d3e01c 4656 default:
4657 last = make_insn_raw (x);
4658 add_insn (last);
4659 break;
15bbde2b 4660 }
4661
4662 return last;
4663}
4664
9845d120 4665/* Make an insn of code DEBUG_INSN with pattern X
4666 and add it to the end of the doubly-linked list. */
4667
4668rtx
4669emit_debug_insn (rtx x)
4670{
 06f9d6ef 4671 rtx last = get_last_insn ();
9845d120 4672 rtx insn;
4673
4674 if (x == NULL_RTX)
4675 return last;
4676
4677 switch (GET_CODE (x))
4678 {
4679 case DEBUG_INSN:
4680 case INSN:
4681 case JUMP_INSN:
4682 case CALL_INSN:
4683 case CODE_LABEL:
4684 case BARRIER:
4685 case NOTE:
4686 insn = x;
4687 while (insn)
4688 {
4689 rtx next = NEXT_INSN (insn);
4690 add_insn (insn);
4691 last = insn;
4692 insn = next;
4693 }
4694 break;
4695
4696#ifdef ENABLE_RTL_CHECKING
4697 case SEQUENCE:
4698 gcc_unreachable ();
4699 break;
4700#endif
4701
4702 default:
4703 last = make_debug_insn_raw (x);
4704 add_insn (last);
4705 break;
4706 }
4707
4708 return last;
4709}
4710
31d3e01c 4711/* Make an insn of code JUMP_INSN with pattern X
4712 and add it to the end of the doubly-linked list. */
15bbde2b 4713
4714rtx
35cb5232 4715emit_jump_insn (rtx x)
15bbde2b 4716{
d90b3d04 4717 rtx last = NULL_RTX, insn;
15bbde2b 4718
31d3e01c 4719 switch (GET_CODE (x))
15bbde2b 4720 {
9845d120 4721 case DEBUG_INSN:
31d3e01c 4722 case INSN:
4723 case JUMP_INSN:
4724 case CALL_INSN:
4725 case CODE_LABEL:
4726 case BARRIER:
4727 case NOTE:
4728 insn = x;
4729 while (insn)
4730 {
4731 rtx next = NEXT_INSN (insn);
4732 add_insn (insn);
4733 last = insn;
4734 insn = next;
4735 }
4736 break;
b36b07d8 4737
31d3e01c 4738#ifdef ENABLE_RTL_CHECKING
4739 case SEQUENCE:
611234b4 4740 gcc_unreachable ();
31d3e01c 4741 break;
4742#endif
b36b07d8 4743
31d3e01c 4744 default:
4745 last = make_jump_insn_raw (x);
4746 add_insn (last);
4747 break;
9dda7915 4748 }
b36b07d8 4749
4750 return last;
4751}
4752
31d3e01c 4753/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4754 and add it to the end of the doubly-linked list. */
4755
4756rtx
35cb5232 4757emit_call_insn (rtx x)
15bbde2b 4758{
31d3e01c 4759 rtx insn;
4760
4761 switch (GET_CODE (x))
15bbde2b 4762 {
9845d120 4763 case DEBUG_INSN:
31d3e01c 4764 case INSN:
4765 case JUMP_INSN:
4766 case CALL_INSN:
4767 case CODE_LABEL:
4768 case BARRIER:
4769 case NOTE:
4770 insn = emit_insn (x);
4771 break;
15bbde2b 4772
31d3e01c 4773#ifdef ENABLE_RTL_CHECKING
4774 case SEQUENCE:
611234b4 4775 gcc_unreachable ();
31d3e01c 4776 break;
4777#endif
15bbde2b 4778
31d3e01c 4779 default:
4780 insn = make_call_insn_raw (x);
15bbde2b 4781 add_insn (insn);
31d3e01c 4782 break;
15bbde2b 4783 }
31d3e01c 4784
4785 return insn;
15bbde2b 4786}
4787
4788/* Add the label LABEL to the end of the doubly-linked list. */
4789
4790rtx
35cb5232 4791emit_label (rtx label)
15bbde2b 4792{
596ef494 4793 gcc_checking_assert (INSN_UID (label) == 0);
4794 INSN_UID (label) = cur_insn_uid++;
4795 add_insn (label);
15bbde2b 4796 return label;
4797}
4798
4799/* Make an insn of code BARRIER
4800 and add it to the end of the doubly-linked list. */
4801
4802rtx
35cb5232 4803emit_barrier (void)
15bbde2b 4804{
19cb6b50 4805 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 4806 INSN_UID (barrier) = cur_insn_uid++;
4807 add_insn (barrier);
4808 return barrier;
4809}
4810
2f57e3d9 4811/* Emit a copy of note ORIG. */
35cb5232 4812
2f57e3d9 4813rtx
4814emit_note_copy (rtx orig)
4815{
4816 rtx note;
48e1416a 4817
2f57e3d9 4818 note = rtx_alloc (NOTE);
48e1416a 4819
2f57e3d9 4820 INSN_UID (note) = cur_insn_uid++;
4821 NOTE_DATA (note) = NOTE_DATA (orig);
ad4583d9 4822 NOTE_KIND (note) = NOTE_KIND (orig);
2f57e3d9 4823 BLOCK_FOR_INSN (note) = NULL;
4824 add_insn (note);
48e1416a 4825
31b97e8f 4826 return note;
15bbde2b 4827}
4828
 31b97e8f 4829/* Make an insn of code NOTE with kind KIND
4830 and add it to the end of the doubly-linked list. */
15bbde2b 4831
4832rtx
ad4583d9 4833emit_note (enum insn_note kind)
15bbde2b 4834{
19cb6b50 4835 rtx note;
15bbde2b 4836
15bbde2b 4837 note = rtx_alloc (NOTE);
4838 INSN_UID (note) = cur_insn_uid++;
ad4583d9 4839 NOTE_KIND (note) = kind;
6c7786cb 4840 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ab87d1bc 4841 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4842 add_insn (note);
4843 return note;
4844}
4845
18b42941 4846/* Emit a clobber of lvalue X. */
4847
4848rtx
4849emit_clobber (rtx x)
4850{
4851 /* CONCATs should not appear in the insn stream. */
4852 if (GET_CODE (x) == CONCAT)
4853 {
4854 emit_clobber (XEXP (x, 0));
4855 return emit_clobber (XEXP (x, 1));
4856 }
4857 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4858}
4859
4860/* Return a sequence of insns to clobber lvalue X. */
4861
4862rtx
4863gen_clobber (rtx x)
4864{
4865 rtx seq;
4866
4867 start_sequence ();
4868 emit_clobber (x);
4869 seq = get_insns ();
4870 end_sequence ();
4871 return seq;
4872}
4873
4874/* Emit a use of rvalue X. */
4875
4876rtx
4877emit_use (rtx x)
4878{
4879 /* CONCATs should not appear in the insn stream. */
4880 if (GET_CODE (x) == CONCAT)
4881 {
4882 emit_use (XEXP (x, 0));
4883 return emit_use (XEXP (x, 1));
4884 }
4885 return emit_insn (gen_rtx_USE (VOIDmode, x));
4886}
4887
4888/* Return a sequence of insns to use rvalue X. */
4889
4890rtx
4891gen_use (rtx x)
4892{
4893 rtx seq;
4894
4895 start_sequence ();
4896 emit_use (x);
4897 seq = get_insns ();
4898 end_sequence ();
4899 return seq;
4900}
4901
f1934a33 4902/* Place a note of KIND on insn INSN with DATUM as the datum. If a
 6312a35e 4903 note of this kind already exists, its datum is replaced. */
f1934a33 4904
c080d8f0 4905rtx
35cb5232 4906set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
f1934a33 4907{
4908 rtx note = find_reg_note (insn, kind, NULL_RTX);
4909
7e6224ab 4910 switch (kind)
4911 {
4912 case REG_EQUAL:
4913 case REG_EQUIV:
4914 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4915 has multiple sets (some callers assume single_set
4916 means the insn only has one set, when in fact it
 4917 means the insn only has one *useful* set). */
4918 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4919 {
611234b4 4920 gcc_assert (!note);
7e6224ab 4921 return NULL_RTX;
4922 }
4923
4924 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4925 It serves no useful purpose and breaks eliminate_regs. */
4926 if (GET_CODE (datum) == ASM_OPERANDS)
4927 return NULL_RTX;
3072d30e 4928
4929 if (note)
4930 {
4931 XEXP (note, 0) = datum;
4932 df_notes_rescan (insn);
4933 return note;
4934 }
7e6224ab 4935 break;
4936
4937 default:
3072d30e 4938 if (note)
4939 {
4940 XEXP (note, 0) = datum;
4941 return note;
4942 }
7e6224ab 4943 break;
4944 }
c080d8f0 4945
a1ddb869 4946 add_reg_note (insn, kind, datum);
3072d30e 4947
4948 switch (kind)
c080d8f0 4949 {
3072d30e 4950 case REG_EQUAL:
4951 case REG_EQUIV:
4952 df_notes_rescan (insn);
4953 break;
4954 default:
4955 break;
c080d8f0 4956 }
f1934a33 4957
c080d8f0 4958 return REG_NOTES (insn);
f1934a33 4959}
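
/* For instance, after emitting an insn that computes X * Y into a
   register, a caller could record the equivalence (a sketch; MODE, X
   and Y stand for values the caller already has in hand):

     set_unique_reg_note (insn, REG_EQUAL, gen_rtx_MULT (mode, x, y));

   A second call with another REG_EQUAL datum updates the existing
   note instead of adding a duplicate.  */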
41cf444a 4960
4961/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
4962rtx
4963set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
4964{
4965 rtx set = single_set (insn);
4966
4967 if (set && SET_DEST (set) == dst)
4968 return set_unique_reg_note (insn, kind, datum);
4969 return NULL_RTX;
4970}
15bbde2b 4971\f
4972/* Return an indication of which type of insn should have X as a body.
4973 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4974
9b69f75b 4975static enum rtx_code
35cb5232 4976classify_insn (rtx x)
15bbde2b 4977{
6d7dc5b9 4978 if (LABEL_P (x))
15bbde2b 4979 return CODE_LABEL;
4980 if (GET_CODE (x) == CALL)
4981 return CALL_INSN;
9cb2517e 4982 if (ANY_RETURN_P (x))
15bbde2b 4983 return JUMP_INSN;
4984 if (GET_CODE (x) == SET)
4985 {
4986 if (SET_DEST (x) == pc_rtx)
4987 return JUMP_INSN;
4988 else if (GET_CODE (SET_SRC (x)) == CALL)
4989 return CALL_INSN;
4990 else
4991 return INSN;
4992 }
4993 if (GET_CODE (x) == PARALLEL)
4994 {
19cb6b50 4995 int j;
15bbde2b 4996 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4997 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4998 return CALL_INSN;
4999 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5000 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5001 return JUMP_INSN;
5002 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5003 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5004 return CALL_INSN;
5005 }
5006 return INSN;
5007}
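
/* A few examples of how patterns classify (register and label numbers
   are hypothetical):

     (set (reg:SI 60) (reg:SI 61))                    --> INSN
     (set (pc) (label_ref 23))                        --> JUMP_INSN
     (call (mem:QI (symbol_ref "foo")) (const_int 0)) --> CALL_INSN

   A PARALLEL is classified as CALL_INSN or JUMP_INSN as soon as one
   of its elements would be.  */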
5008
5009/* Emit the rtl pattern X as an appropriate kind of insn.
5010 If X is a label, it is simply added into the insn chain. */
5011
5012rtx
35cb5232 5013emit (rtx x)
15bbde2b 5014{
5015 enum rtx_code code = classify_insn (x);
5016
611234b4 5017 switch (code)
15bbde2b 5018 {
611234b4 5019 case CODE_LABEL:
5020 return emit_label (x);
5021 case INSN:
5022 return emit_insn (x);
5023 case JUMP_INSN:
5024 {
5025 rtx insn = emit_jump_insn (x);
5026 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5027 return emit_barrier ();
5028 return insn;
5029 }
5030 case CALL_INSN:
5031 return emit_call_insn (x);
9845d120 5032 case DEBUG_INSN:
5033 return emit_debug_insn (x);
611234b4 5034 default:
5035 gcc_unreachable ();
15bbde2b 5036 }
15bbde2b 5037}
5038\f
1f3233d1 5039/* Space for free sequence stack entries. */
7035b2ab 5040static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
1f3233d1 5041
735f4358 5042/* Begin emitting insns to a sequence. If this sequence will contain
5043 something that might cause the compiler to pop arguments to function
5044 calls (because those pops have previously been deferred; see
5045 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5046 before calling this function. That will ensure that the deferred
5047 pops are not accidentally emitted in the middle of this sequence. */
15bbde2b 5048
5049void
35cb5232 5050start_sequence (void)
15bbde2b 5051{
5052 struct sequence_stack *tem;
5053
1f3233d1 5054 if (free_sequence_stack != NULL)
5055 {
5056 tem = free_sequence_stack;
5057 free_sequence_stack = tem->next;
5058 }
5059 else
ba72912a 5060 tem = ggc_alloc_sequence_stack ();
15bbde2b 5061
0a893c29 5062 tem->next = seq_stack;
06f9d6ef 5063 tem->first = get_insns ();
5064 tem->last = get_last_insn ();
15bbde2b 5065
0a893c29 5066 seq_stack = tem;
15bbde2b 5067
06f9d6ef 5068 set_first_insn (0);
5069 set_last_insn (0);
15bbde2b 5070}
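
/* The canonical pairing, sketched (SPOT is a hypothetical insn before
   which the new code should land):

     do_pending_stack_adjust ();
     start_sequence ();
     ... emit the new instructions ...
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, spot);

   The do_pending_stack_adjust call matters only if the sequence might
   contain deferred argument pops, as described above.  */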
5071
b49854c6 5072/* Set up the insn chain starting with FIRST as the current sequence,
5073 saving the previously current one. See the documentation for
5074 start_sequence for more information about how to use this function. */
15bbde2b 5075
5076void
35cb5232 5077push_to_sequence (rtx first)
15bbde2b 5078{
5079 rtx last;
5080
5081 start_sequence ();
5082
3c802a1e 5083 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5084 ;
15bbde2b 5085
06f9d6ef 5086 set_first_insn (first);
5087 set_last_insn (last);
15bbde2b 5088}
5089
28bf151d 5090/* Like push_to_sequence, but take the last insn as an argument to avoid
5091 looping through the list. */
5092
5093void
5094push_to_sequence2 (rtx first, rtx last)
5095{
5096 start_sequence ();
5097
06f9d6ef 5098 set_first_insn (first);
5099 set_last_insn (last);
28bf151d 5100}
5101
ab74c92f 5102/* Set up the outer-level insn chain
5103 as the current sequence, saving the previously current one. */
5104
5105void
35cb5232 5106push_topmost_sequence (void)
ab74c92f 5107{
2041cfd9 5108 struct sequence_stack *stack, *top = NULL;
ab74c92f 5109
5110 start_sequence ();
5111
0a893c29 5112 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5113 top = stack;
5114
06f9d6ef 5115 set_first_insn (top->first);
5116 set_last_insn (top->last);
ab74c92f 5117}
5118
5119/* After emitting to the outer-level insn chain, update the outer-level
5120 insn chain, and restore the previous saved state. */
5121
5122void
35cb5232 5123pop_topmost_sequence (void)
ab74c92f 5124{
2041cfd9 5125 struct sequence_stack *stack, *top = NULL;
ab74c92f 5126
0a893c29 5127 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5128 top = stack;
5129
06f9d6ef 5130 top->first = get_insns ();
5131 top->last = get_last_insn ();
ab74c92f 5132
5133 end_sequence ();
5134}
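
/* These two functions are normally used as a bracketing pair when
   code generated inside a nested sequence must land at the end of the
   function proper (a sketch; PAT is hypothetical):

     push_topmost_sequence ();
     emit_insn (pat);
     pop_topmost_sequence ();
*/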
5135
15bbde2b 5136/* After emitting to a sequence, restore previous saved state.
5137
b49854c6 5138 To get the contents of the sequence just made, you must call
31d3e01c 5139 `get_insns' *before* calling here.
b49854c6 5140
5141 If the compiler might have deferred popping arguments while
5142 generating this sequence, and this sequence will not be immediately
5143 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5144 before calling get_insns. That will ensure that the deferred
b49854c6 5145 pops are inserted into this sequence, and not into some random
5146 location in the instruction stream. See INHIBIT_DEFER_POP for more
5147 information about deferred popping of arguments. */
15bbde2b 5148
5149void
35cb5232 5150end_sequence (void)
15bbde2b 5151{
0a893c29 5152 struct sequence_stack *tem = seq_stack;
15bbde2b 5153
06f9d6ef 5154 set_first_insn (tem->first);
5155 set_last_insn (tem->last);
0a893c29 5156 seq_stack = tem->next;
15bbde2b 5157
1f3233d1 5158 memset (tem, 0, sizeof (*tem));
5159 tem->next = free_sequence_stack;
5160 free_sequence_stack = tem;
15bbde2b 5161}
5162
5163/* Return 1 if currently emitting into a sequence. */
5164
5165int
35cb5232 5166in_sequence_p (void)
15bbde2b 5167{
0a893c29 5168 return seq_stack != 0;
15bbde2b 5169}
15bbde2b 5170\f
02ebfa52 5171/* Put the various virtual registers into REGNO_REG_RTX. */
5172
2f3874ce 5173static void
b079a207 5174init_virtual_regs (void)
02ebfa52 5175{
b079a207 5176 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5177 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5178 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5179 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5180 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
60778e62 5181 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5182 = virtual_preferred_stack_boundary_rtx;
0a893c29 5183}
5184
928d57e3 5185\f
5186/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5187static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5188static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5189static int copy_insn_n_scratches;
5190
5191/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5192 copied an ASM_OPERANDS.
5193 In that case, it is the original input-operand vector. */
5194static rtvec orig_asm_operands_vector;
5195
5196/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5197 copied an ASM_OPERANDS.
5198 In that case, it is the copied input-operand vector. */
5199static rtvec copy_asm_operands_vector;
5200
5201/* Likewise for the constraints vector. */
5202static rtvec orig_asm_constraints_vector;
5203static rtvec copy_asm_constraints_vector;
5204
5205/* Recursively create a new copy of an rtx for copy_insn.
5206 This function differs from copy_rtx in that it handles SCRATCHes and
5207 ASM_OPERANDs properly.
5208 Normally, this function is not used directly; use copy_insn as front end.
5209 However, you could first copy an insn pattern with copy_insn and then use
5210 this function afterwards to properly copy any REG_NOTEs containing
5211 SCRATCHes. */
5212
5213rtx
35cb5232 5214copy_insn_1 (rtx orig)
928d57e3 5215{
19cb6b50 5216 rtx copy;
5217 int i, j;
5218 RTX_CODE code;
5219 const char *format_ptr;
928d57e3 5220
25e880b1 5221 if (orig == NULL)
5222 return NULL;
5223
928d57e3 5224 code = GET_CODE (orig);
5225
5226 switch (code)
5227 {
5228 case REG:
d7fce3c8 5229 case DEBUG_EXPR:
0349edce 5230 CASE_CONST_ANY:
928d57e3 5231 case SYMBOL_REF:
5232 case CODE_LABEL:
5233 case PC:
5234 case CC0:
e0691b9a 5235 case RETURN:
9cb2517e 5236 case SIMPLE_RETURN:
928d57e3 5237 return orig;
c09425a0 5238 case CLOBBER:
5239 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5240 return orig;
5241 break;
928d57e3 5242
5243 case SCRATCH:
5244 for (i = 0; i < copy_insn_n_scratches; i++)
5245 if (copy_insn_scratch_in[i] == orig)
5246 return copy_insn_scratch_out[i];
5247 break;
5248
5249 case CONST:
3072d30e 5250 if (shared_const_p (orig))
928d57e3 5251 return orig;
5252 break;
d823ba47 5253
928d57e3 5254 /* A MEM with a constant address is not sharable. The problem is that
5255 the constant address may need to be reloaded. If the mem is shared,
5256 then reloading one copy of this mem will cause all copies to appear
5257 to have been reloaded. */
5258
5259 default:
5260 break;
5261 }
5262
f2d0e9f1 5263 /* Copy the various flags, fields, and other information. We assume
5264 that all fields need copying, and then clear the fields that should
928d57e3 5265 not be copied. That is the sensible default behavior, and forces
5266 us to explicitly document why we are *not* copying a flag. */
f2d0e9f1 5267 copy = shallow_copy_rtx (orig);
928d57e3 5268
5269 /* We do not copy the USED flag, which is used as a mark bit during
5270 walks over the RTL. */
7c25cb91 5271 RTX_FLAG (copy, used) = 0;
928d57e3 5272
5273 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
6720e96c 5274 if (INSN_P (orig))
928d57e3 5275 {
7c25cb91 5276 RTX_FLAG (copy, jump) = 0;
5277 RTX_FLAG (copy, call) = 0;
5278 RTX_FLAG (copy, frame_related) = 0;
928d57e3 5279 }
d823ba47 5280
928d57e3 5281 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5282
5283 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
f2d0e9f1 5284 switch (*format_ptr++)
5285 {
5286 case 'e':
5287 if (XEXP (orig, i) != NULL)
5288 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5289 break;
928d57e3 5290
f2d0e9f1 5291 case 'E':
5292 case 'V':
5293 if (XVEC (orig, i) == orig_asm_constraints_vector)
5294 XVEC (copy, i) = copy_asm_constraints_vector;
5295 else if (XVEC (orig, i) == orig_asm_operands_vector)
5296 XVEC (copy, i) = copy_asm_operands_vector;
5297 else if (XVEC (orig, i) != NULL)
5298 {
5299 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5300 for (j = 0; j < XVECLEN (copy, i); j++)
5301 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5302 }
5303 break;
928d57e3 5304
f2d0e9f1 5305 case 't':
5306 case 'w':
5307 case 'i':
5308 case 's':
5309 case 'S':
5310 case 'u':
5311 case '0':
5312 /* These are left unchanged. */
5313 break;
928d57e3 5314
f2d0e9f1 5315 default:
5316 gcc_unreachable ();
5317 }
928d57e3 5318
5319 if (code == SCRATCH)
5320 {
5321 i = copy_insn_n_scratches++;
611234b4 5322 gcc_assert (i < MAX_RECOG_OPERANDS);
928d57e3 5323 copy_insn_scratch_in[i] = orig;
5324 copy_insn_scratch_out[i] = copy;
5325 }
5326 else if (code == ASM_OPERANDS)
5327 {
d91f2122 5328 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5329 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5330 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5331 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
928d57e3 5332 }
5333
5334 return copy;
5335}
5336
5337/* Create a new copy of an rtx.
5338 This function differs from copy_rtx in that it handles SCRATCHes and
5339 ASM_OPERANDs properly.
5340 INSN doesn't really have to be a full INSN; it could be just the
5341 pattern. */
5342rtx
35cb5232 5343copy_insn (rtx insn)
928d57e3 5344{
5345 copy_insn_n_scratches = 0;
5346 orig_asm_operands_vector = 0;
5347 orig_asm_constraints_vector = 0;
5348 copy_asm_operands_vector = 0;
5349 copy_asm_constraints_vector = 0;
5350 return copy_insn_1 (insn);
5351}
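
/* For example, to copy an insn's pattern and then its REG_NOTES while
   keeping any SCRATCHes shared consistently between the two copies
   (a sketch):

     pat = copy_insn (PATTERN (insn));
     notes = copy_insn_1 (REG_NOTES (insn));

   Since copy_insn resets the SCRATCH bookkeeping, the copy_insn_1
   call must follow it directly, with no intervening copy_insn.  */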
02ebfa52 5352
15bbde2b 5353/* Initialize data structures and variables in this file
5354 before generating rtl for each function. */
5355
5356void
35cb5232 5357init_emit (void)
15bbde2b 5358{
06f9d6ef 5359 set_first_insn (NULL);
5360 set_last_insn (NULL);
9845d120 5361 if (MIN_NONDEBUG_INSN_UID)
5362 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5363 else
5364 cur_insn_uid = 1;
5365 cur_debug_insn_uid = 1;
15bbde2b 5366 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
15bbde2b 5367 first_label_num = label_num;
0a893c29 5368 seq_stack = NULL;
15bbde2b 5369
15bbde2b 5370 /* Init the tables that describe all the pseudo regs. */
5371
fd6ffb7c 5372 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
15bbde2b 5373
fd6ffb7c 5374 crtl->emit.regno_pointer_align
2457c754 5375 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
d4c332ff 5376
ba72912a 5377 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
fcdc122e 5378
936082bb 5379 /* Put copies of all the hard registers into regno_reg_rtx. */
90295bd2 5380 memcpy (regno_reg_rtx,
679bcc8d 5381 initial_regno_reg_rtx,
90295bd2 5382 FIRST_PSEUDO_REGISTER * sizeof (rtx));
936082bb 5383
15bbde2b 5384 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
b079a207 5385 init_virtual_regs ();
888e0d33 5386
5387 /* Indicate that the virtual registers and stack locations are
5388 all pointers. */
e61a0a7f 5389 REG_POINTER (stack_pointer_rtx) = 1;
5390 REG_POINTER (frame_pointer_rtx) = 1;
5391 REG_POINTER (hard_frame_pointer_rtx) = 1;
5392 REG_POINTER (arg_pointer_rtx) = 1;
888e0d33 5393
e61a0a7f 5394 REG_POINTER (virtual_incoming_args_rtx) = 1;
5395 REG_POINTER (virtual_stack_vars_rtx) = 1;
5396 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5397 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5398 REG_POINTER (virtual_cfa_rtx) = 1;
89525da0 5399
d4c332ff 5400#ifdef STACK_BOUNDARY
80909c64 5401 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5402 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5403 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5404 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5405
5406 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5407 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5408 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5409 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5410 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
d4c332ff 5411#endif
5412
89525da0 5413#ifdef INIT_EXPANDERS
5414 INIT_EXPANDERS;
5415#endif
15bbde2b 5416}
5417
6e68dcb2 5418/* Generate a vector constant for mode MODE; CONSTANT indexes const_tiny_rtx. */
886cfd4f 5419
5420static rtx
6e68dcb2 5421gen_const_vector (enum machine_mode mode, int constant)
886cfd4f 5422{
5423 rtx tem;
5424 rtvec v;
5425 int units, i;
5426 enum machine_mode inner;
5427
5428 units = GET_MODE_NUNITS (mode);
5429 inner = GET_MODE_INNER (mode);
5430
069b07bf 5431 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5432
886cfd4f 5433 v = rtvec_alloc (units);
5434
6e68dcb2 5435 /* We need to call this function after we set the scalar const_tiny_rtx
5436 entries. */
5437 gcc_assert (const_tiny_rtx[constant][(int) inner]);
886cfd4f 5438
5439 for (i = 0; i < units; ++i)
6e68dcb2 5440 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
886cfd4f 5441
9426b612 5442 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5443 return tem;
5444}
5445
9426b612 5446/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use CONST0_RTX,
6e68dcb2 5447 CONST1_RTX or CONSTM1_RTX when all elements are zero, one or minus one. */
9426b612 5448rtx
35cb5232 5449gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
9426b612 5450{
6e68dcb2 5451 enum machine_mode inner = GET_MODE_INNER (mode);
5452 int nunits = GET_MODE_NUNITS (mode);
5453 rtx x;
9426b612 5454 int i;
5455
6e68dcb2 5456 /* Check to see if all of the elements have the same value. */
5457 x = RTVEC_ELT (v, nunits - 1);
5458 for (i = nunits - 2; i >= 0; i--)
5459 if (RTVEC_ELT (v, i) != x)
5460 break;
5461
5462 /* If the values are all the same, check to see if we can use one of the
5463 standard constant vectors. */
5464 if (i == -1)
5465 {
5466 if (x == CONST0_RTX (inner))
5467 return CONST0_RTX (mode);
5468 else if (x == CONST1_RTX (inner))
5469 return CONST1_RTX (mode);
ba8dfb08 5470 else if (x == CONSTM1_RTX (inner))
5471 return CONSTM1_RTX (mode);
6e68dcb2 5472 }
5473
5474 return gen_rtx_raw_CONST_VECTOR (mode, v);
9426b612 5475}
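
/* Illustrative sketch (assumes the target has V4SImode): a vector whose
   elements are all zero collapses to the shared CONST0_RTX, so callers can
   test for the zero vector by pointer equality.

       rtvec v = rtvec_alloc (4);
       int i;
       for (i = 0; i < 4; i++)
	 RTVEC_ELT (v, i) = const0_rtx;
       gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
		   == CONST0_RTX (V4SImode));  */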
5476
6d8b68a3 5477/* Initialize global register information required by all functions. */
5478
5479void
5480init_emit_regs (void)
5481{
5482 int i;
d83fcaa1 5483 enum machine_mode mode;
5484 mem_attrs *attrs;
6d8b68a3 5485
5486 /* Reset register attributes.  */
5487 htab_empty (reg_attrs_htab);
5488
5489 /* We need reg_raw_mode, so initialize the modes now. */
5490 init_reg_modes_target ();
5491
5492 /* Assign register numbers to the globally defined register rtx. */
6d8b68a3 5493 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5494 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5495 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5496 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5497 virtual_incoming_args_rtx =
5498 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5499 virtual_stack_vars_rtx =
5500 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5501 virtual_stack_dynamic_rtx =
5502 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5503 virtual_outgoing_args_rtx =
5504 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5505 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
60778e62 5506 virtual_preferred_stack_boundary_rtx =
5507 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6d8b68a3 5508
5509 /* Initialize RTL for commonly used hard registers. These are
5510 copied into regno_reg_rtx as we begin to compile each function. */
5511 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
679bcc8d 5512 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6d8b68a3 5513
5514#ifdef RETURN_ADDRESS_POINTER_REGNUM
5515 return_address_pointer_rtx
5516 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5517#endif
5518
6d8b68a3 5519 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5520 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5521 else
5522 pic_offset_table_rtx = NULL_RTX;
d83fcaa1 5523
5524 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5525 {
5526 mode = (enum machine_mode) i;
5527 attrs = ggc_alloc_cleared_mem_attrs ();
5528 attrs->align = BITS_PER_UNIT;
5529 attrs->addrspace = ADDR_SPACE_GENERIC;
5530 if (mode != BLKmode)
5531 {
6d58bcba 5532 attrs->size_known_p = true;
5533 attrs->size = GET_MODE_SIZE (mode);
d83fcaa1 5534 if (STRICT_ALIGNMENT)
5535 attrs->align = GET_MODE_ALIGNMENT (mode);
5536 }
5537 mode_mem_attrs[i] = attrs;
5538 }
6d8b68a3 5539}
5540
01703575 5541/* Create some permanent unique rtl objects shared between all functions. */
15bbde2b 5542
5543void
01703575 5544init_emit_once (void)
15bbde2b 5545{
5546 int i;
5547 enum machine_mode mode;
9e042f31 5548 enum machine_mode double_mode;
15bbde2b 5549
e397ad8e 5550 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5551 hash tables. */
573aba85 5552 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5553 const_int_htab_eq, NULL);
c6259b83 5554
573aba85 5555 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5556 const_double_htab_eq, NULL);
2ff23ed0 5557
e397ad8e 5558 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5559 const_fixed_htab_eq, NULL);
5560
573aba85 5561 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5562 mem_attrs_htab_eq, NULL);
ca74b940 5563 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5564 reg_attrs_htab_eq, NULL);
77695070 5565
71d7daa2 5566 /* Compute the byte, word and double modes.  */
5567
5568 byte_mode = VOIDmode;
5569 word_mode = VOIDmode;
5570 double_mode = VOIDmode;
5571
069b07bf 5572 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5573 mode != VOIDmode;
71d7daa2 5574 mode = GET_MODE_WIDER_MODE (mode))
5575 {
5576 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5577 && byte_mode == VOIDmode)
5578 byte_mode = mode;
5579
5580 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5581 && word_mode == VOIDmode)
5582 word_mode = mode;
5583 }
5584
069b07bf 5585 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5586 mode != VOIDmode;
71d7daa2 5587 mode = GET_MODE_WIDER_MODE (mode))
5588 {
5589 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5590 && double_mode == VOIDmode)
5591 double_mode = mode;
5592 }
5593
5594 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5595
57c097d5 5596#ifdef INIT_EXPANDERS
ab5beff9 5597 /* This is to initialize {init|mark|free}_machine_status before the first
5598 call to push_function_context_to. This is needed by the Chill front
3fb1e43b 5599 end which calls push_function_context_to before the first call to
57c097d5 5600 init_function_start. */
5601 INIT_EXPANDERS;
5602#endif
5603
15bbde2b 5604 /* Create the unique rtx's for certain rtx codes and operand values. */
5605
8fd5918e 5606 /* Don't use gen_rtx_CONST_INT here, since it would look up these very
7014838c 5607 variables while we are still initializing them. */
15bbde2b 5608 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
d823ba47 5609 const_int_rtx[i + MAX_SAVED_CONST_INT] =
a717d5b4 5610 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
15bbde2b 5611
1a60f06a 5612 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5613 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
57c097d5 5614 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
1a60f06a 5615 else
3ad7bb1c 5616 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
15bbde2b 5617
2ff23ed0 5618 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5619 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5620 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
3fa759a9 5621
5622 dconstm1 = dconst1;
5623 dconstm1.sign = 1;
77e89269 5624
5625 dconsthalf = dconst1;
9d96125b 5626 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
15bbde2b 5627
ba8dfb08 5628 for (i = 0; i < 3; i++)
15bbde2b 5629 {
3fa759a9 5630 const REAL_VALUE_TYPE *const r =
badfe841 5631 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5632
069b07bf 5633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5634 mode != VOIDmode;
5635 mode = GET_MODE_WIDER_MODE (mode))
5636 const_tiny_rtx[i][(int) mode] =
5637 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5638
5639 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5640 mode != VOIDmode;
15bbde2b 5641 mode = GET_MODE_WIDER_MODE (mode))
2ff23ed0 5642 const_tiny_rtx[i][(int) mode] =
5643 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
15bbde2b 5644
b572011e 5645 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
15bbde2b 5646
069b07bf 5647 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5648 mode != VOIDmode;
15bbde2b 5649 mode = GET_MODE_WIDER_MODE (mode))
b572011e 5650 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
7540dcc4 5651
8c20007a 5652 for (mode = MIN_MODE_PARTIAL_INT;
5653 mode <= MAX_MODE_PARTIAL_INT;
5654 mode = (enum machine_mode)((int)(mode) + 1))
7540dcc4 5655 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
15bbde2b 5656 }
5657
ba8dfb08 5658 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5659
5660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5661 mode != VOIDmode;
5662 mode = GET_MODE_WIDER_MODE (mode))
5663 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5664
8c20007a 5665 for (mode = MIN_MODE_PARTIAL_INT;
5666 mode <= MAX_MODE_PARTIAL_INT;
5667 mode = (enum machine_mode)((int)(mode) + 1))
dd276d20 5668 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5669
4248fc32 5670 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5671 mode != VOIDmode;
5672 mode = GET_MODE_WIDER_MODE (mode))
5673 {
5674 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5675 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5676 }
5677
5678 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5679 mode != VOIDmode;
5680 mode = GET_MODE_WIDER_MODE (mode))
5681 {
5682 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5683 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5684 }
5685
886cfd4f 5686 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5687 mode != VOIDmode;
5688 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5689 {
5690 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5691 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
ba8dfb08 5692 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6e68dcb2 5693 }
886cfd4f 5694
5695 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5696 mode != VOIDmode;
5697 mode = GET_MODE_WIDER_MODE (mode))
6e68dcb2 5698 {
5699 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5700 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5701 }
886cfd4f 5702
06f0b99c 5703 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5704 mode != VOIDmode;
5705 mode = GET_MODE_WIDER_MODE (mode))
5706 {
5707 FCONST0(mode).data.high = 0;
5708 FCONST0(mode).data.low = 0;
5709 FCONST0(mode).mode = mode;
e397ad8e 5710 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5711 FCONST0 (mode), mode);
06f0b99c 5712 }
5713
5714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5715 mode != VOIDmode;
5716 mode = GET_MODE_WIDER_MODE (mode))
5717 {
5718 FCONST0(mode).data.high = 0;
5719 FCONST0(mode).data.low = 0;
5720 FCONST0(mode).mode = mode;
e397ad8e 5721 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5722 FCONST0 (mode), mode);
06f0b99c 5723 }
5724
5725 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5726 mode != VOIDmode;
5727 mode = GET_MODE_WIDER_MODE (mode))
5728 {
5729 FCONST0(mode).data.high = 0;
5730 FCONST0(mode).data.low = 0;
5731 FCONST0(mode).mode = mode;
e397ad8e 5732 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5733 FCONST0 (mode), mode);
06f0b99c 5734
5735 /* Store the value 1, which in fixed point is 1 << GET_MODE_FBIT (mode). */
5736 FCONST1(mode).data.high = 0;
5737 FCONST1(mode).data.low = 0;
5738 FCONST1(mode).mode = mode;
5739 lshift_double (1, 0, GET_MODE_FBIT (mode),
24cd46a7 5740 HOST_BITS_PER_DOUBLE_INT,
06f0b99c 5741 &FCONST1(mode).data.low,
5742 &FCONST1(mode).data.high,
5743 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5744 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5745 FCONST1 (mode), mode);
06f0b99c 5746 }
5747
5748 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5749 mode != VOIDmode;
5750 mode = GET_MODE_WIDER_MODE (mode))
5751 {
5752 FCONST0(mode).data.high = 0;
5753 FCONST0(mode).data.low = 0;
5754 FCONST0(mode).mode = mode;
e397ad8e 5755 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5756 FCONST0 (mode), mode);
06f0b99c 5757
5758 /* Store the value 1, which in fixed point is 1 << GET_MODE_FBIT (mode). */
5759 FCONST1(mode).data.high = 0;
5760 FCONST1(mode).data.low = 0;
5761 FCONST1(mode).mode = mode;
5762 lshift_double (1, 0, GET_MODE_FBIT (mode),
24cd46a7 5763 HOST_BITS_PER_DOUBLE_INT,
06f0b99c 5764 &FCONST1(mode).data.low,
5765 &FCONST1(mode).data.high,
5766 SIGNED_FIXED_POINT_MODE_P (mode));
e397ad8e 5767 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5768 FCONST1 (mode), mode);
5769 }
5770
5771 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5772 mode != VOIDmode;
5773 mode = GET_MODE_WIDER_MODE (mode))
5774 {
5775 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5776 }
5777
5778 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5779 mode != VOIDmode;
5780 mode = GET_MODE_WIDER_MODE (mode))
5781 {
5782 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5783 }
5784
5785 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5786 mode != VOIDmode;
5787 mode = GET_MODE_WIDER_MODE (mode))
5788 {
5789 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5790 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5791 }
5792
5793 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5794 mode != VOIDmode;
5795 mode = GET_MODE_WIDER_MODE (mode))
5796 {
5797 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5798 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
06f0b99c 5799 }
5800
0fd4500a 5801 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5802 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5803 const_tiny_rtx[0][i] = const0_rtx;
15bbde2b 5804
065336b4 5805 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5806 if (STORE_FLAG_VALUE == 1)
5807 const_tiny_rtx[1][(int) BImode] = const1_rtx;
7d7b0bac 5808
5809 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5810 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5811 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5812 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
15bbde2b 5813}
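
/* After init_emit_once runs, const_tiny_rtx[c][(int) mode] holds the
   constant C (0, 1 or 2) in MODE, and row 3 holds minus one for integer
   and integer vector modes; for example
   const_tiny_rtx[1][(int) SImode] == const1_rtx, and, assuming the target
   has V4SImode, const_tiny_rtx[3][(int) V4SImode] is the all-minus-one
   vector.  */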
ac6c481d 5814\f
cd0fe062 5815/* Produce an exact duplicate of insn INSN after AFTER.
5816 Take care to update libcall regions if present. */
5817
5818rtx
35cb5232 5819emit_copy_of_insn_after (rtx insn, rtx after)
cd0fe062 5820{
9ce37fa7 5821 rtx new_rtx, link;
cd0fe062 5822
5823 switch (GET_CODE (insn))
5824 {
5825 case INSN:
9ce37fa7 5826 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5827 break;
5828
5829 case JUMP_INSN:
9ce37fa7 5830 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5831 break;
5832
9845d120 5833 case DEBUG_INSN:
5834 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5835 break;
5836
cd0fe062 5837 case CALL_INSN:
9ce37fa7 5838 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
cd0fe062 5839 if (CALL_INSN_FUNCTION_USAGE (insn))
9ce37fa7 5840 CALL_INSN_FUNCTION_USAGE (new_rtx)
cd0fe062 5841 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
9ce37fa7 5842 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5843 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5844 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
48e1416a 5845 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
9c2a0c05 5846 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
cd0fe062 5847 break;
5848
5849 default:
611234b4 5850 gcc_unreachable ();
cd0fe062 5851 }
5852
5853 /* Update LABEL_NUSES. */
9ce37fa7 5854 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
cd0fe062 5855
9ce37fa7 5856 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
ab87d1bc 5857
98116afd 5858 /* If the old insn is frame related, then so is the new one. This is
5859 primarily needed for IA-64 unwind info which marks epilogue insns,
5860 which may be duplicated by the basic block reordering code. */
9ce37fa7 5861 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
98116afd 5862
19d2fe05 5863 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5864 will make them. REG_LABEL_TARGETs are created there too, but are
5865 supposed to be sticky, so we copy them. */
cd0fe062 5866 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
19d2fe05 5867 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
cd0fe062 5868 {
5869 if (GET_CODE (link) == EXPR_LIST)
9ce37fa7 5870 add_reg_note (new_rtx, REG_NOTE_KIND (link),
a1ddb869 5871 copy_insn_1 (XEXP (link, 0)));
cd0fe062 5872 else
9ce37fa7 5873 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
cd0fe062 5874 }
5875
9ce37fa7 5876 INSN_CODE (new_rtx) = INSN_CODE (insn);
5877 return new_rtx;
cd0fe062 5878}
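
/* Usage sketch: duplicating INSN at the end of basic block BB, in the style
   of the basic block reordering code (BB is assumed to be a basic_block):

       rtx dup = emit_copy_of_insn_after (insn, BB_END (bb));

   The duplicate keeps the original's register notes, locator and
   frame-related flag, so debug and unwind info stay correct.  */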
1f3233d1 5879
7035b2ab 5880static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
c09425a0 5881rtx
5882gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5883{
5884 if (hard_reg_clobbers[mode][regno])
5885 return hard_reg_clobbers[mode][regno];
5886 else
5887 return (hard_reg_clobbers[mode][regno] =
5888 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5889}
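
/* Usage sketch (FLAGS_REGNUM stands in for some target's hard register
   number): the CLOBBER is cached per (mode, regno), so repeated requests
   return the identical shared rtx:

       rtx c1 = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);
       rtx c2 = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);
       gcc_assert (c1 == c2);  */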
5890
23a070f3 5891/* Data structures mapping INSN_LOCATOR to scope blocks, line numbers and
5892 files. To be GGC friendly we use separate varrays; this also slightly
5893 improves memory locality in the binary search. The _locs arrays contain
5894 the locators at which the given property changes. block_locators_blocks
5895 contains the scope block used for all insn locators greater than or
5896 equal to the corresponding block_locators_locs value and smaller than
5897 the following one; likewise for the location arrays. */
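/* For example, with block_locators_locs holding {1, 5, 9} and
   block_locators_blocks holding {A, B, C}, locators 1 to 4 belong to scope
   block A, locators 5 to 8 to B, and locators 9 and up to C; locator_scope
   below recovers the block by binary search.  */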
5898static VEC(int,heap) *block_locators_locs;
5899static GTY(()) VEC(tree,gc) *block_locators_blocks;
5900static VEC(int,heap) *locations_locators_locs;
65f4cf9c 5901DEF_VEC_A(location_t);
5902DEF_VEC_ALLOC_A(location_t,heap);
23a070f3 5903static VEC(location_t,heap) *locations_locators_vals;
5904int prologue_locator;
5905int epilogue_locator;
5906
5907/* Hold the current and the last location information, so that the data
5908 structures are built lazily, only when instructions at a given place
5909 are actually emitted. */
5910static location_t curr_location, last_location;
5911static tree curr_block, last_block;
5912static int curr_rtl_loc = -1;
5913
5914/* Allocate the insn locator data structures. */
5915void
5916insn_locators_alloc (void)
5917{
5918 prologue_locator = epilogue_locator = 0;
5919
5920 block_locators_locs = VEC_alloc (int, heap, 32);
5921 block_locators_blocks = VEC_alloc (tree, gc, 32);
5922 locations_locators_locs = VEC_alloc (int, heap, 32);
5923 locations_locators_vals = VEC_alloc (location_t, heap, 32);
5924
5925 curr_location = UNKNOWN_LOCATION;
5926 last_location = UNKNOWN_LOCATION;
5927 curr_block = NULL;
5928 last_block = NULL;
5929 curr_rtl_loc = 0;
5930}
5931
5932/* At the end of the emit stage, clear the current location. */
5933void
5934insn_locators_finalize (void)
5935{
5936 if (curr_rtl_loc >= 0)
5937 epilogue_locator = curr_insn_locator ();
5938 curr_rtl_loc = -1;
5939}
5940
5941/* Free the insn locator data structures. */
5942void
5943insn_locators_free (void)
5944{
5945 prologue_locator = epilogue_locator = 0;
5946
5947 VEC_free (int, heap, block_locators_locs);
5948 VEC_free (tree,gc, block_locators_blocks);
5949 VEC_free (int, heap, locations_locators_locs);
5950 VEC_free (location_t, heap, locations_locators_vals);
5951}
5952
5953/* Set current location. */
5954void
5955set_curr_insn_source_location (location_t location)
5956{
5957 /* IV opts calls into RTL expansion to compute costs of operations. At this
5958 time locators are not initialized. */
5959 if (curr_rtl_loc == -1)
5960 return;
5961 curr_location = location;
5962}
5963
5964/* Get current location. */
5965location_t
5966get_curr_insn_source_location (void)
5967{
5968 return curr_location;
5969}
5970
5971/* Set current scope block. */
5972void
5973set_curr_insn_block (tree b)
5974{
5975 /* IV opts calls into RTL expansion to compute costs of operations. At this
5976 time locators are not initialized. */
5977 if (curr_rtl_loc == -1)
5978 return;
5979 if (b)
5980 curr_block = b;
5981}
5982
5983/* Get current scope block. */
5984tree
5985get_curr_insn_block (void)
5986{
5987 return curr_block;
5988}
5989
5990/* Return current insn locator. */
5991int
5992curr_insn_locator (void)
5993{
5994 if (curr_rtl_loc == -1 || curr_location == UNKNOWN_LOCATION)
5995 return 0;
5996 if (last_block != curr_block)
5997 {
5998 curr_rtl_loc++;
5999 VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
6000 VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
6001 last_block = curr_block;
6002 }
6003 if (last_location != curr_location)
6004 {
6005 curr_rtl_loc++;
6006 VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
6007 VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
6008 last_location = curr_location;
6009 }
6010 return curr_rtl_loc;
6011}
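
/* A sketch of the lazy scheme, assuming LOC1 and LOC2 are distinct known
   locations: consecutive insns expanded at the same location share one
   locator, and a new one is allocated only when the location or scope
   block changes.

       int a, b, c;
       set_curr_insn_source_location (loc1);
       a = curr_insn_locator ();        a new locator is pushed
       b = curr_insn_locator ();        b == a, nothing pushed
       set_curr_insn_source_location (loc2);
       c = curr_insn_locator ();        pushed again, c == a + 1  */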
6012\f
6013
6014/* Return the lexical scope block that locator LOC belongs to. */
6015static tree
6016locator_scope (int loc)
6017{
6018 int max = VEC_length (int, block_locators_locs);
6019 int min = 0;
6020
6021 /* When block_locators_locs was initialized, the pro- and epilogue
6022 insns didn't exist yet and can therefore not be found this way.
6023 But we know that they belong to the outermost block of the
6024 current function.
6025 Without this test, the prologue would be put inside the block of
6026 the first valid instruction in the function, and when that first
6027 insn is part of an inlined function the low_pc of that inlined
6028 function would be wrong. Likewise for the epilogue and the last
6029 valid instruction. */
6030 if (loc == prologue_locator || loc == epilogue_locator)
6031 return DECL_INITIAL (cfun->decl);
6032
6033 if (!max || !loc)
6034 return NULL;
6035 while (1)
6036 {
6037 int pos = (min + max) / 2;
6038 int tmp = VEC_index (int, block_locators_locs, pos);
6039
6040 if (tmp <= loc && min != pos)
6041 min = pos;
6042 else if (tmp > loc && max != pos)
6043 max = pos;
6044 else
6045 {
6046 min = pos;
6047 break;
6048 }
6049 }
6050 return VEC_index (tree, block_locators_blocks, min);
6051}
6052
6053/* Return the lexical scope block that INSN belongs to. */
6054tree
6055insn_scope (const_rtx insn)
6056{
6057 return locator_scope (INSN_LOCATOR (insn));
6058}
6059
6060/* Return the location of the statement specified by locator LOC. */
6061location_t
6062locator_location (int loc)
6063{
6064 int max = VEC_length (int, locations_locators_locs);
6065 int min = 0;
6066
6067 while (1)
6068 {
6069 int pos = (min + max) / 2;
6070 int tmp = VEC_index (int, locations_locators_locs, pos);
6071
6072 if (tmp <= loc && min != pos)
6073 min = pos;
6074 else if (tmp > loc && max != pos)
6075 max = pos;
6076 else
6077 {
6078 min = pos;
6079 break;
6080 }
6081 }
2b15d2ba 6082 return VEC_index (location_t, locations_locators_vals, min);
23a070f3 6083}
6084
6085/* Return the source line of the statement specified by locator LOC. */
6086int
6087locator_line (int loc)
6088{
6089 expanded_location xloc;
6090 if (!loc)
6091 return 0;
6092 else
6093 xloc = expand_location (locator_location (loc));
6094 return xloc.line;
6095}
6096
6097/* Return line number of the statement that produced this insn. */
6098int
6099insn_line (const_rtx insn)
6100{
6101 return locator_line (INSN_LOCATOR (insn));
6102}
6103
6104/* Return source file of the statement specified by LOC. */
6105const char *
6106locator_file (int loc)
6107{
6108 expanded_location xloc;
6109 if (!loc)
6110 return 0;
6111 else
6112 xloc = expand_location (locator_location (loc));
6113 return xloc.file;
6114}
6115
6116/* Return source file of the statement that produced this insn. */
6117const char *
6118insn_file (const_rtx insn)
6119{
6120 return locator_file (INSN_LOCATOR (insn));
6121}
6122
6123/* Return true if LOC1 and LOC2 locators have the same location and scope. */
6124bool
6125locator_eq (int loc1, int loc2)
6126{
6127 if (loc1 == loc2)
6128 return true;
6129 if (locator_location (loc1) != locator_location (loc2))
6130 return false;
6131 return locator_scope (loc1) == locator_scope (loc2);
6132}
6133\f
30c3c442 6134
6135/* Return true if memory model MODEL requires a pre-operation (release-style)
6136 barrier or a post-operation (acquire-style) barrier. While not universal,
6137 this function matches the behavior of several targets. */
6138
6139bool
6140need_atomic_barrier_p (enum memmodel model, bool pre)
6141{
6142 switch (model)
6143 {
6144 case MEMMODEL_RELAXED:
6145 case MEMMODEL_CONSUME:
6146 return false;
6147 case MEMMODEL_RELEASE:
6148 return pre;
6149 case MEMMODEL_ACQUIRE:
6150 return !pre;
6151 case MEMMODEL_ACQ_REL:
6152 case MEMMODEL_SEQ_CST:
6153 return true;
6154 default:
6155 gcc_unreachable ();
6156 }
6157}
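
/* Usage sketch (illustrative; gen_memory_barrier stands in for whatever
   fence pattern a target provides): an expander lacking a combined atomic
   instruction brackets the memory operation as MODEL requires:

       if (need_atomic_barrier_p (model, true))
	 emit_insn (gen_memory_barrier ());
       emit_move_insn (mem, val);
       if (need_atomic_barrier_p (model, false))
	 emit_insn (gen_memory_barrier ());  */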
6158\f
1f3233d1 6159#include "gt-emit-rtl.h"